diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000..3a10059 Binary files /dev/null and b/.DS_Store differ diff --git a/.gitignore b/.gitignore index 526dc88..a154876 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +.DS_Store +~$* +*.xlsx *~ myheatmap.py~ *.pyc @@ -11,4 +14,5 @@ myheatmap.py~ *.swp main.py *.svg -*.png \ No newline at end of file +*.png +.DS_Store diff --git a/Algebraic Approach Simulated Two Coupled Resonators.ipynb b/Algebraic Approach Simulated Two Coupled Resonators.ipynb index 2399dc1..eac7168 100644 --- a/Algebraic Approach Simulated Two Coupled Resonators.ipynb +++ b/Algebraic Approach Simulated Two Coupled Resonators.ipynb @@ -6,10 +6,10 @@ "metadata": {}, "outputs": [], "source": [ - "## Update matplotlib to a version that can label bar graphs.\n", - "#!pip install -U matplotlib --user\n", + "## Update matplotlib\n", + "#%pip install -U matplotlib --user\n", "## Install pydoe2\n", - "#!pip install pyDOE2" + "#%pip install pyDOE2" ] }, { @@ -38,6 +38,8 @@ "from time import time\n", "import pyDOE2\n", "import sys\n", + "import warnings\n", + "warnings.filterwarnings(action='once')\n", "\n", "#from decimal import Decimal\n", "#sys.path.append('..') # myheatmap is in parent directory\n", @@ -46,7 +48,7 @@ "\n", "sns.set_context('paper')\n", "\n", - "savefolder = r'G:\\Shared drives\\Horowitz Lab Notes\\Horowitz, Viva - notes and files'\n", + "savefolder = r'G:\\Shared drives\\Horowitz Lab Notes\\Horowitz, Viva - notes and files\\Validating NetMAP simulated data (public share)'\n", "saving = True\n", "os.chdir(savefolder)\n", "\n", @@ -68,47 +70,30 @@ "metadata": {}, "outputs": [], "source": [ + "## Imports from my py files.\n", + "\n", "from myheatmap import myheatmap\n", "from helperfunctions import flatten,listlength,printtime,make_real_iff_real, \\\n", - " store_params, read_params, savefigure, datestring, beep\n", - "from resonatorsimulator import *\n", - "from simulated_experiment import *\n", - "from resonatorstats import *\n", - "from resonatorphysics import *\n", - "from resonatorfrequencypicker import *\n", - "from resonatorSVDanalysis import *\n", - "from resonator_plotting import *\n", + " store_params, read_params, savefigure, datestring, beep, calc_error_interval\n", + "from resonatorsimulator import curve1, theta1, curve2, theta2, realamp1, imamp1, realamp2, imamp2, \\\n", + " curvemono, thetamono, realampmono, imampmono, rsqrdlist, \\\n", + " complex_noise, calculate_spectra, noisyR1ampphase, noisyR2ampphase, SNRknown, SNRs\n", + "from simulated_experiment import describeresonator, measurementdfcalc, compile_rsqrd, \\\n", + " assert_results_length, describe_monomer_results, simulated_experiment\n", + "from resonatorstats import syserr, combinedsyserr, rsqrd\n", + "from resonatorphysics import complexamp, amp, A_from_Z, res_freq_weak_coupling, \\\n", + " approx_Q, approx_width, calcnarrowerW\n", + "from resonatorfrequencypicker import freqpoints, find_freq_from_angle, makemorefrequencies,\\\n", + " create_drive_arrays, find_special_freq, res_freq_numeric, \\\n", + " allmeasfreq_one_res, allmeasfreq_two_res, best_choice_freq_set\n", + "from NetMAP import Zmat, \\\n", + " normalize_parameters_1d_by_force, quadratic_formula, normalize_parameters_to_res1_and_F_2d, \\\n", + " normalize_parameters_to_m1_m2_assuming_2d, normalize_parameters_to_m1_set_k1_set_assuming_2d, \\\n", + " normalize_parameters_to_m1_F_set_assuming_2d, normalize_parameters_assuming_3d\n", + "from resonator_plotting import set_format, text_color_legend, 
spectrum_plot, plotcomplex, \\\n", + " plot_SVD_results, convert_to_measurementdf\n", "\n", - "# When this runs, an empty graph will appear below." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def text_color_legend():\n", - " l = plt.legend()\n", - " # set text color in legend\n", - " for text in l.get_texts():\n", - " if '1D' in str(text):\n", - " text.set_color(co1)\n", - " elif '2D' in str(text):\n", - " text.set_color(co2)\n", - " elif '3D' in str(text):\n", - " text.set_color(co3)\n", - " return l" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\"\"\"import matplotlib.font_manager # See list of fonts\n", - "matplotlib.font_manager.findSystemFonts(fontpaths=None, fontext='ttf')\"\"\"" + "# When this runs, an empty graph will appear below (because plotcomplex calls canvas.draw)." ] }, { @@ -121,19 +106,9 @@ "source": [ "# global variables that I promise not to vary\n", "from simulated_experiment import complexamplitudenoisefactor, use_complexnoise\n", - "from resonator_plotting import co1,co2,co3 # color scheme\n", - "\n", - "# Nature says: (https://www.nature.com/npp/authors-and-referees/artwork-figures-tables)\n", - "#Figure width - single image\t86 mm (3.38 in) (should be able to fit into a single column of the printed journal)\n", - "#Figure width - multi-part\t178 mm (7 in) (should be able to fit into a double column of the printed journal)\n", - "#Text size\t8 point (should be readable after reduction - avoid large type or thick lines)\n", - "#Line width\tBetween 0.5 and 1 point\n", - "\n", - "\n", - "set_format()\n", + "from resonator_plotting import co1,co2,co3, figwidth # color scheme\n", "\n", - "plt.figure(figsize = (3.82/2,1))\n", - "plt.plot(1)" + "set_format() # displays an empty graph" ] }, { @@ -142,6 +117,9 @@ "metadata": {}, "outputs": [], "source": [ + "## Set parameters. 
We will assume these are in SI units for the purpose of these simultions.\n", + "\n", + "\n", "verbose = False\n", "#MONOMER = False\n", "#forceboth = False\n", @@ -155,6 +133,7 @@ "b2_set = np.nan\n", "forceboth = False\n", "\n", + "\n", "\"\"\"#Use functions to make matrices of amplitude and phase for each resonator (with noise)\n", "#define set values (sandbox version.)\n", "resonatorsystem = 1\n", @@ -173,8 +152,8 @@ "MONOMER = False\"\"\"\n", "\n", "\n", - "\n", - "### lightly damped monomer ## this is my official lightly damped monomer\n", + "\"\"\"\n", + "### lightly damped monomer ## this is my official lightly damped monomer for Fig 2.\n", "MONOMER = True\n", "resonatorsystem = 2\n", "m1_set = 4\n", @@ -185,7 +164,7 @@ "maxfreq = 2.01\n", "noiselevel= 10\n", "forceboth = False\n", - "\n", + "\"\"\"\n", "\n", "\"\"\"\n", "### medium damped monomer -- use for demo\n", @@ -199,8 +178,8 @@ "maxfreq = 1.8\n", "noiselevel = 200 # increased 2022-11-16 for demo Fig 1.\n", "\"\"\"\n", - "\"\"\"\n", - "### medium damped monomer -- use for Fig 4, picking frequencies\n", + "\n", + "\"\"\"### medium damped monomer -- use for Fig 4, picking frequencies\n", "resonatorsystem = -3\n", "m1_set = 4\n", "b1_set = .4\n", @@ -209,10 +188,10 @@ "MONOMER = True\n", "minfreq = 1.4\n", "maxfreq = 1.8\n", - "noiselevel = 1\n", - "\"\"\"\n", + "noiselevel = 1\"\"\"\n", "\n", - "\"\"\"## somewhat heavily damped monomer\n", + "\"\"\"\n", + "## somewhat heavily damped monomer\n", "MONOMER = True\n", "resonatorsystem = 4\n", "m1_set = 1\n", @@ -220,18 +199,21 @@ "k1_set = 1\n", "F_set = 1\n", "minfreq = .01\n", - "maxfreq = 5\"\"\"\n", - "\n", + "maxfreq = 5\n", + "\"\"\"\n", "\n", - "\"\"\"### heavily damped monomer\n", + "\"\"\"\n", + "### heavily damped monomer\n", "MONOMER = True\n", "resonatorsystem = 5\n", "m1_set = 4\n", "b1_set = 8\n", "k1_set = 9\n", "F_set = 1\n", - "noiselevel = 10\"\"\"\n", - "\n", + "noiselevel = 10\n", + "minfreq = .01\n", + "maxfreq = 5\n", + "\"\"\"\n", "\n", "\"\"\"\n", "# FORCEBOTH true or false?\n", @@ -248,14 +230,14 @@ "MONOMER = False\n", "noiselevel = 10\n", "\n", - "#forceboth=True\n", - "#resonatorsystem = 6\n", - "\n", - "forceboth = False\n", - "resonatorsystem = 7\n", - "minfreq = .3\n", - "maxfreq = 2.2\"\"\"\n", + "forceboth=True # for SI\n", + "resonatorsystem = 6\n", "\n", + "#forceboth = False\n", + "#resonatorsystem = 7\n", + "#minfreq = .3\n", + "#maxfreq = 2.2\n", + "\"\"\"\n", "\n", "\"\"\"\n", "### Weakly coupled dimer #2\n", @@ -271,6 +253,7 @@ "MONOMER = False\n", "forceboth= False\n", "\"\"\"\n", + "\n", "\"\"\"\n", "## well-separated dimer, 1D then 2D, then 3D. 
Weakly coupled dimer #3\n", "## But not very accurate.\n", @@ -289,10 +272,10 @@ "forceboth= False\n", "MONOMER = False\n", "\"\"\"\n", - "\"\"\"\n", - "### 1D better # weakly coupled dimer #4\n", + "\n", + "\"\"\"### 1D better # weakly coupled dimer #4\n", "#define set values\n", - "## This is the weakly coupled dimer I am using\n", + "## This is the weakly coupled dimer I am using (Figure 3)\n", "## 2022-11-15 switched back to what I had before.\n", "resonatorsystem = 10\n", "m1_set = 1\n", @@ -307,11 +290,12 @@ "MONOMER = False\n", "forceboth= False\n", "minfreq = .1\n", - "maxfreq = 2.2\"\"\"\n", + "maxfreq = 2.2\n", + "\"\"\"\n", "\n", "\n", "\"\"\"\n", - "## Well-separated dimer / Medium coupled dimer #1\n", + "## Well-separated dimer / Medium coupled dimer #1 / Used for Figure 5.\n", "MONOMER = False\n", "resonatorsystem = 11\n", "m1_set = 8\n", @@ -327,9 +311,11 @@ "minfreq = 0.1\n", "maxfreq = 5\n", "#(but this is 3D for forceboth)\"\"\"\n", + "\n", + "\n", "\"\"\"\n", "### Medium coupled dimer #2\n", - "# This is my official medium coupled dimer.\n", + "# This is my official medium coupled dimer, in SI only\n", "resonatorsystem = 12\n", "m1_set = 11\n", "m2_set = 5\n", @@ -340,13 +326,13 @@ "k12_set = 4\n", "F_set = 1\n", "MONOMER = False\n", - "noiselevel = 10\n", + "noiselevel = 1 # reduced from 10, 2023-01-07 because the results were so poor\n", "forceboth= False\n", "minfreq = .1\n", - "maxfreq = 3\n", - "\"\"\"\n", + "maxfreq = 3\"\"\"\n", "\n", - "\"\"\"## strongly coupled dimer\n", + "\"\"\"\n", + "## strongly coupled dimer in SI only\n", "MONOMER = False\n", "resonatorsystem = 13\n", "m1_set = 8\n", @@ -372,8 +358,8 @@ "b2_set = 5.864\n", "F_set = 1.861\n", "noiselevel = 1\n", - "forceboth = False\n", - "\"\"\"\n", + "forceboth = False\"\"\"\n", + "\n", "\"\"\"\n", "### Does this make sense for Brittany's experimental data?\n", "resonatorsystem = 15\n", @@ -392,6 +378,23 @@ "maxfreq = 150796447 # 21 MHz * (2 * pi) \n", "\"\"\"\n", "\n", + "# creating this in 2025-02 to try to get overlapping resonance peaks. 
Everyone would say this is weak coupling.\n", + "resonatorsystem = 16 \n", + "m1_set = 11\n", + "m2_set = 5\n", + "b1_set = 0.5\n", + "b2_set = 0.1\n", + "k1_set = 21\n", + "k2_set = 10\n", + "k12_set = .1\n", + "F_set = 1\n", + "MONOMER = False\n", + "forceboth= False\n", + "minfreq = 1.3843945877020478 - .4\n", + "maxfreq = 1.3843945877020478 + .4\n", + "\n", + "## Make calculations for this resonator system\n", + "\n", "res1 = res_freq_weak_coupling(k1_set, m1_set, b1_set)\n", "\n", "\n", @@ -419,6 +422,7 @@ " MONOMER = MONOMER, forceboth=forceboth,\n", " n=n)\n", "\n", + "print('resonatorsystem:', resonatorsystem)\n", "describeresonator(vals_set, MONOMER, forceboth, noiselevel)\n", "print('Drive length:', len(drive), '(for calculating R^2)')\n", "\n", @@ -426,9 +430,11 @@ "if resonatorsystem == 15: # 22.1208 MHz and 23.3554 MHz\n", " desiredfreqs = [22.1208*2 * np.pi * 1e6, 23.3554*2 * np.pi * 1e6]\n", "else:\n", - " desiredfreqs = res_freq_numeric(vals_set=vals_set, MONOMER=MONOMER, forceboth=forceboth, includefreqs = reslist,\n", + " for i in range(7):\n", + " reslist = res_freq_numeric(vals_set=vals_set, MONOMER=MONOMER, forceboth=forceboth, includefreqs = reslist,\n", " minfreq=minfreq, maxfreq = maxfreq,\n", " verboseplot = False, verbose=False, iterations = 3, numtoreturn=2)\n", + " desiredfreqs = reslist\n", "\n", "drive = np.sort(np.unique(np.append(drive, desiredfreqs)))\n", "print('Desired freqs:', desiredfreqs)\n", @@ -441,16 +447,7 @@ "#p = range(len(drive))\n", "print('Index of freqs:', p)\n", "\n", - "beep()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# redo in case it was updated\n", + "\n", "vals_set = store_params(m1_set, m2_set, b1_set, b2_set, k1_set, k2_set, k12_set, F_set, MONOMER=MONOMER)\n", "\n", "R1_amp_noiseless = curve1(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, MONOMER, forceboth=forceboth)\n", @@ -463,6 +460,8 @@ "R2_real_amp_noiseless = realamp2(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, forceboth=forceboth)\n", "R2_im_amp_noiseless = imamp2(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, forceboth=forceboth)\n", "\n", + "plt.plot(drive,R2_amp_noiseless)\n", + "\n", "usenoise = True\n", "\n", "## actually calculate the spectra\n", @@ -470,27 +469,7 @@ "R1_amp, R1_phase, R2_amp, R2_phase, R1_real_amp, R1_im_amp, R2_real_amp, R2_im_amp, _ = noisyspectra\n", "\n", "\n", - "\n", - "'''def vh_from_vals_set(drive, vals_set, MONOMER, forceboth):\n", - " vals_set = store_params(m1_set, m2_set, b1_set, b2_set, k1_set, k2_set, k12_set, F_set, MONOMER=MONOMER)\n", - " \n", - " R1_amp_noiseless = curve1(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, MONOMER, forceboth = forceboth)\n", - " R1_phase_noiseless = theta1(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, MONOMER, forceboth = forceboth)\n", - " R2_amp_noiseless = curve2(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, forceboth = forceboth)\n", - " R2_phase_noiseless = theta2(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, forceboth = forceboth)\n", - " R1_real_amp_noiseless = realamp1(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, MONOMER, forceboth = forceboth)\n", - " R1_im_amp_noiseless = imamp1(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, MONOMER, forceboth = forceboth)\n", - " 
R2_real_amp_noiseless = realamp2(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, forceboth = forceboth)\n", - " R2_im_amp_noiseless = imamp2(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, forceboth = forceboth)\n", - "\n", - " df = measurementdfcalc(drive, p,R1_amp=R1_amp,R2_amp=R2_amp,R1_phase=R1_phase, R2_phase=R2_phase, \n", - " R1_amp_noiseless=R1_amp_noiseless,R2_amp_noiseless=R2_amp_noiseless,\n", - " R1_phase_noiseless=R1_phase_noiseless, R2_phase_noiseless=R2_phase_noiseless\n", - " )\n", - " Zmatrix = Zmat(df, frequencycolumn = 'drive', complexamplitude1 = 'R1AmpCom', complexamplitude2 = 'R2AmpCom',MONOMER=MONOMER)\n", - " u, s, vh = np.linalg.svd(Zmatrix, full_matrices = True)\n", - " vh = make_real_iff_real(vh)\n", - " return u,s,vh''';\n" + "beep()" ] }, { @@ -511,15 +490,19 @@ "if resonatorsystem == 15: # 22.1208 MHz and 23.3554 MHz\n", " desiredfreqs = [22.1208*2 * np.pi * 1e6, 23.3554*2 * np.pi * 1e6]\n", "else:\n", - " desiredfreqs = res_freq_numeric(vals_set=vals_set, MONOMER=MONOMER, forceboth=forceboth, includefreqs = reslist,\n", - " minfreq=minfreq, maxfreq=maxfreq,\n", - " numtoreturn = 2, iterations = 3, verbose=False)\n", + " desiredfreqs = reslist\n", + " for i in range(5):\n", + " desiredfreqs, method = res_freq_numeric(vals_set=vals_set, MONOMER=MONOMER, forceboth=forceboth, \n", + " includefreqs = desiredfreqs,\n", + " minfreq=minfreq, maxfreq=maxfreq, returnoptions=True, numtoreturn=2, \n", + " use_R2_only=True, # for consideration !!!\n", + " iterations = 3, verbose=False)\n", "drive = np.unique(np.sort(np.append(drive, desiredfreqs)))\n", "p = freqpoints(desiredfreqs = desiredfreqs, drive = drive)\n", "print(\"p:\",p)\n", - "assert len(np.unique(p)) == 2\n", + "#assert len(np.unique(p)) == 2\n", "print(len(drive))\n", - "\n" + "print(method)" ] }, { @@ -591,7 +574,7 @@ " MONOMER=MONOMER, forceboth=forceboth)/np.pi, # true curve\n", " color = 'gray', alpha = 0.2) \n", "ax2.plot(drive, R1_phase/np.pi, '.', color = datacolor) # noisy simulated data\n", - "ax2.set_ylabel('Phase $\\delta$ ($\\pi$)')\n", + "ax2.set_ylabel('Phase $\\phi$ ($\\pi$)')\n", "ax2.set_title('Simulated R1 Phase')\n", "\n", "#For loop to plot chosen values from table\n", @@ -615,7 +598,7 @@ " forceboth=forceboth)/np.pi, # true curve\n", " color = 'gray', alpha = 0.2)\n", "ax4.plot(drive, R2_phase/np.pi, '.', color = datacolor)\n", - "ax4.set_ylabel('Phase $\\delta_2$ ($\\pi$)')\n", + "ax4.set_ylabel('Phase $\\phi_2$ ($\\pi$)')\n", "ax4.set_title('Simulated R2 Phase')\n", "\n", "#For loop to plot R1 amplitude values from table\n", @@ -626,7 +609,9 @@ " plt.sca(ax)\n", " #plt.xticks([res1, res2])\n", " ax.set_xlabel('Freq (rad/s)')\n", - "\n", + " \n", + "for ax in [ax1, ax3]:\n", + " ax.set_yscale('log') # It's an option!\n", " \n", "plt.tight_layout()\n", "\n", @@ -645,16 +630,19 @@ " imamp1(morefrequencies, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, \n", " MONOMER, forceboth=forceboth), \n", " color='gray', alpha = .5)\n", - "ax6.plot(realamp2(morefrequencies, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, forceboth=forceboth), \n", - " imamp2(morefrequencies, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, forceboth=forceboth), \n", - " color='gray', alpha = .5)\n", + "if not MONOMER:\n", + " ax6.plot(realamp2(morefrequencies, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, forceboth=forceboth), \n", + " imamp2(morefrequencies, k1_set, k2_set, k12_set, 
b1_set, b2_set, F_set, m1_set, m2_set, 0, forceboth=forceboth), \n", + " color='gray', alpha = .5)\n", "\n", "plotcomplex(Z1, drive, 'Complex Amplitude $Z_1$', ax=ax5, label_markers=label_markers)\n", "ax5.scatter(np.real(df.R1AmpCom), np.imag(df.R1AmpCom), s=150, facecolors='none', edgecolors='k', label=\"data for SVD\") \n", "\n", - "plotcomplex(Z2, drive, 'Complex Amplitude $Z_2$', ax=ax6, label_markers=label_markers)\n", - "ax6.scatter(np.real(df.R2AmpCom), np.imag(df.R2AmpCom), s=150, facecolors='none', edgecolors='k', label=\"data for SVD\") \n", - "plt.legend() \n", + "if not MONOMER:\n", + " plotcomplex(Z2, drive, 'Complex Amplitude $Z_2$', ax=ax6, label_markers=label_markers)\n", + " ax6.scatter(np.real(df.R2AmpCom), np.imag(df.R2AmpCom), s=150, facecolors='none', edgecolors='k', label=\"data for SVD\") \n", + "plt.legend() \n", + "\n", " \n", "plt.tight_layout()\n", "\n", @@ -662,33 +650,6 @@ "print('resonant phase at:',res_freq_numeric(mode = 'phase',vals_set=vals_set, MONOMER=MONOMER,forceboth=forceboth, includefreqs=reslist,minfreq=minfreq, maxfreq=maxfreq))" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "df" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "df.describe()" - ] - }, { "cell_type": "code", "execution_count": null, @@ -715,7 +676,11 @@ " display(pd.DataFrame(np.array(Zmatrix, dtype = np.double), columns = parameternames))\n", "\n", "#SVD\n", - "u, s, vh = np.linalg.svd(Zmatrix, full_matrices = True)\n", + "try:\n", + " u, s, vh = np.linalg.svd(Zmatrix, full_matrices = True)\n", + "except:\n", + " print('Could not solve')\n", + " \n", "#u, s, vh = sc.linalg.svd(Zmatrix, full_matrices = False, lapack_driver = 'gesvd')\n", "#vh = make_real_iff_real(vh)\n", "\n", @@ -752,13 +717,6 @@ "display(vh)" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, { "cell_type": "code", "execution_count": null, @@ -842,9 +800,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "#View vh array and assign variables to proper row vector\n", @@ -932,7 +888,7 @@ "\n", "plot_SVD_results(drive,R1_amp,R1_phase,R2_amp,R2_phase,df, K1, K2, K12, B1, B2, FD, M1, M2, \n", " vals_set = vals_set, MONOMER=MONOMER, forceboth=forceboth, labelfreqs=drive[p], overlay = False,\n", - " saving=saving) " + " saving=saving);" ] }, { @@ -956,9 +912,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "## what if the null-space is 2D?\n", @@ -1046,7 +1000,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "scrolled": true + }, "outputs": [], "source": [ "print(\"2D nullspace\")\n", @@ -1057,9 +1013,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "## What if it's 3D nullspace?\n", @@ -1154,6 +1108,15 @@ "stophere # next: do 1D, 2D, 3D with Repeats. 
simulated_experiment()" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "reslist" + ] + }, { "cell_type": "code", "execution_count": null, @@ -1168,15 +1131,27 @@ " n = 20\n", "else:\n", " demo = False\n", + " \n", + "try:\n", + " measurementfreqs\n", + "except NameError:\n", + " measurementfreqs = reslist\n", + " \n", "\n", "if resonatorsystem == 15:\n", " measurementfreqs = desiredfreqs # Brittany's expermental setup\n", "else:\n", - " measurementfreqs, category = res_freq_numeric(vals_set, MONOMER, forceboth,\n", - " mode = 'amp', includefreqs = reslist,\n", + " if resonatorsystem == 11:\n", + " use_R2_only=True\n", + " else:\n", + " use_R2_only=False\n", + " for i in range(5):\n", + " measurementfreqs, category = res_freq_numeric(vals_set, MONOMER, forceboth,\n", + " mode = 'amp', includefreqs = reslist + measurementfreqs,\n", " minfreq=minfreq, maxfreq=maxfreq, morefrequencies=None,\n", " unique = True, veryunique = True, numtoreturn = 2, \n", - " verboseplot = False, plottitle = None, verbose=True, iterations = 3,\n", + " verboseplot = False, plottitle = None, verbose=False, \n", + " iterations = 3, use_R2_only=use_R2_only,\n", " returnoptions = True)\n", "\n", "print(measurementfreqs)\n", @@ -1221,16 +1196,6 @@ "set_format()" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "plt.rcParams\n", - "# display settings" - ] - }, { "cell_type": "code", "execution_count": null, @@ -1241,12 +1206,8 @@ ] }, { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": false - }, - "outputs": [], + "cell_type": "markdown", + "metadata": {}, "source": [ "overlay = False\n", "figsizeoverride1 = None\n", @@ -1265,8 +1226,8 @@ "\n", "# Ran 1000 times in 20.438 sec\n", "# Ran 1000 times in 16.996 sec on desktop with verbose = True\n", - "repeats = 1\n", - "#repeats = 1000\n", + "repeats = 1000\n", + "#repeats = 999\n", "if demo:\n", " repeats = 1\n", " overlay = True\n", @@ -1280,7 +1241,7 @@ " noiselevel=noiselevel, MONOMER=MONOMER, forceboth=forceboth,\n", " overlay=overlay, demo = demo, \n", " figsizeoverride1 = figsizeoverride1, figsizeoverride2 = figsizeoverride2,\n", - " resonatorsystem=resonatorsystem, show_set=False,\n", + " resonatorsystem=resonatorsystem, show_set=True,\n", " repeats=repeats , verbose = verbose, context = 'paper', saving = saving)\n", " try: # repeated experiments results\n", " repeatedexptsres = repeatedexptsres.append(thisres, ignore_index=True)\n", @@ -1290,7 +1251,14 @@ "printtime(repeats, before, after) \n", "display(repeatedexptsres.transpose()) \n", "\n", - "repeatedexptsresmean = repeatedexptsres.mean() " + "repeatedexptsresmean = repeatedexptsres.mean() \n", + "\n", + "if saving:\n", + " datestr = datestring()\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ str(repeats) + \"simulations,\" + datestr + ', noise'+ str(noiselevel)\n", + " repeatedexptsres.to_csv(savename + '.csv')\n", + " print(\"Saved:\", savename + '.csv')\n", + "plt.show()" ] }, { @@ -1299,7 +1267,7 @@ "metadata": {}, "outputs": [], "source": [ - "list(repeatedexptsres.columns)" + "repeatedexptsres['M1_1Ddiscrep']=(repeatedexptsres['M1_1D'] - repeatedexptsres['m1_set'])" ] }, { @@ -1308,87 +1276,206 @@ "metadata": {}, "outputs": [], "source": [ - "repeatedexptsres['sqrtk1m1_set'] = np.sqrt(repeatedexptsres['k1_set']/repeatedexptsres['m1_set'])\n", - "if not MONOMER:\n", - " repeatedexptsres['sqrtk2m2_set'] = 
np.sqrt(repeatedexptsres['k2_set']/repeatedexptsres['m2_set'])\n", - "for D in ['1D', '2D', '3D']:\n", - " repeatedexptsres['SQRTK1M1_' + D] = np.sqrt(repeatedexptsres['K1_' + D]/repeatedexptsres['M1_' + D])\n", - " if not MONOMER:\n", - " repeatedexptsres['SQRTK2M2_' + D] = np.sqrt(repeatedexptsres['K2_' + D]/repeatedexptsres['M2_' + D])\n", - "\n", - "\n", - "repeatedexptsresmean = repeatedexptsres.mean() " + "repeatedexptsres['B1_1Ddiscrep']=(repeatedexptsres['B1_1D'] - repeatedexptsres['b1_set'])" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ - "#sns.set_context('paper')\n", - "\n", - "describeresonator(vals_set, MONOMER, forceboth, noiselevel)\n", - "saving = True\n", - "figheight = 1.3\n", - "\n", + "repeatedexptsres['K1_1Ddiscrep']=(repeatedexptsres['K1_1D'] - repeatedexptsres['k1_set'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "Ds= [ '1D', '2D', '3D']\n", "if MONOMER:\n", - " shortkeysummary = ['m1','k1','b1', 'sqrtk1m1']\n", + " Rs = ['1']\n", "else:\n", - " shortkeysummary = ['m1', 'k1', 'sqrtk1m1', 'b1', 'm2', 'k2', 'sqrtk2m2', 'b2', 'k12']\n", - " \n", - "# choose manually\n", - "list_to_show = ['1D', '2D', '3D']\n", - "#list_to_show = ['1D', '3D'] \n", - "\n", - "if resonatorsystem == 12:\n", - " list_to_show = ['1D', '3D']\n", - "if resonatorsystem == 2:\n", - " list_to_show = ['1D', '2D']\n", - "\n", - "shortkeylist = list([])\n", - "shortkeylistset = list([])\n", - "for key in shortkeysummary:\n", - " for D in list_to_show:\n", - " shortkeylist.append(key.upper()+ '_' + D)\n", - " shortkeylistset.append(key.lower() + '_set')\n", - "\n", - " \n", - "#paper-style box and whisker figure about the error.\n", - "set_format()\n", - "\n", - "# create dataframe of signed systematic errors\n", - "signederr = syserr(x_found = (repeatedexptsres[shortkeylist]), \n", - " x_set = np.array(repeatedexptsres[shortkeylistset]), \n", - " absval = False)\n", - "\n", - "\n", - "signederr.rename(columns={'M1_1D': '$m_{1,\\mathrm{1D}}$', 'M2_1D': '$m_{2,\\mathrm{1D}}$',\n", - " 'B1_1D': '$b_{1,\\mathrm{1D}}$', 'B2_1D': '$b_{2,\\mathrm{1D}}$',\n", - " 'K1_1D': '$k_{1,\\mathrm{1D}}$', 'K2_1D': '$k_{2,\\mathrm{1D}}$',\n", - " 'M1_2D': '$m_{1,\\mathrm{2D}}$', 'M2_2D': '$m_{2,\\mathrm{2D}}$',\n", - " 'B1_2D': '$b_{1,\\mathrm{2D}}$', 'B2_2D': '$b_{2,\\mathrm{2D}}$',\n", - " 'K1_2D': '$k_{1,\\mathrm{2D}}$', 'K2_2D': '$k_{2,\\mathrm{2D}}$',\n", - " 'M1_3D': '$m_{1,\\mathrm{3D}}$', 'M2_3D': '$m_{2,\\mathrm{3D}}$',\n", - " 'B1_3D': '$b_{1,\\mathrm{3D}}$', 'B2_3D': '$b_{2,\\mathrm{3D}}$',\n", - " 'K1_3D': '$k_{1,\\mathrm{3D}}$', 'K2_3D': '$k_{2,\\mathrm{3D}}$', \n", - " 'K12_3D': '$k_{12,\\mathrm{3D}}$', 'K12_1D': '$k_{12,\\mathrm{1D}}$', \n", - " 'K12_2D': '$k_{12,\\mathrm{2D}}$', \n", - " 'SQRTK1M1_1D': '$\\sqrt{k_1/m_1}_\\mathrm{,1D}$',\n", - " 'SQRTK1M1_2D': '$\\sqrt{k_1/m_1}_\\mathrm{,2D}$',\n", - " 'SQRTK1M1_3D': '$\\sqrt{k_1/m_1}_\\mathrm{,3D}$',\n", - " 'SQRTK2M2_1D': '$\\sqrt{k_2/m_2}_\\mathrm{,1D}$',\n", - " 'SQRTK2M2_2D': '$\\sqrt{k_2/m_2}_\\mathrm{,2D}$',\n", - " 'SQRTK2M2_3D': '$\\sqrt{k_2/m_2}_\\mathrm{,3D}$',\n", - " },\n", - " inplace=True)\n", + " Rs = ['1','2']\n", "\n", - "#display(signederr)\n", - "#fig, (ax1,ax2) = plt.subplots(2,1, figsize = (3.5,3.5))\n", - "\n", - "if MONOMER:\n", + "for D in Ds:\n", + " for R in Rs:\n", + " repeatedexptsres['M'+ R +'_' + D +'discrep']=(repeatedexptsres['M'+ R +'_' + D] - 
repeatedexptsres['m'+ R +'_set'])\n", + " repeatedexptsres['B'+ R +'_' + D +'discrep']=(repeatedexptsres['B'+ R +'_' + D] - repeatedexptsres['b'+ R +'_set'])\n", + " repeatedexptsres['K'+ R +'_' + D +'discrep']=(repeatedexptsres['K'+ R +'_' + D] - repeatedexptsres['k'+ R +'_set'])\n", + " repeatedexptsres['M'+ R +'_' + D +'fract_discrep'] = repeatedexptsres['M'+ R +'_' + D +'discrep'] / \\\n", + " repeatedexptsres['m'+ R +'_set']\n", + " repeatedexptsres['B'+ R +'_' + D +'fract_discrep'] = repeatedexptsres['B'+ R +'_' + D +'discrep'] / \\\n", + " repeatedexptsres['b'+ R +'_set']\n", + " repeatedexptsres['K'+ R +'_' + D +'fract_discrep'] = repeatedexptsres['K'+ R +'_' + D +'discrep'] / \\\n", + " repeatedexptsres['k'+ R +'_set']" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "if saving:\n", + " datestr = datestring()\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ str(repeats) + \"simulations,\" + datestr + ', noise'+ str(noiselevel)\n", + " repeatedexptsres.to_csv(savename + '.csv')\n", + " print(\"Saved:\", savename + '.csv')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "repeatedexptsres['1-avg_expt_cartes_rsqrd_1D'][0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "repeatedexptsres['1-expt_A1_rsqrd_1D'][0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "repeatedexptsres['1-expt_realZ1_rsqrd_1D'][0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "repeatedexptsres['1-expt_imZ1_rsqrd_1D'][0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "repeatedexptsres['1-expt_phase1_rsqrd_1D'][0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "list(repeatedexptsres.columns)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "repeatedexptsres['avgsyserr%_1D']" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "repeatedexptsres['sqrtk1m1_set'] = np.sqrt(repeatedexptsres['k1_set']/repeatedexptsres['m1_set'])\n", + "if not MONOMER:\n", + " repeatedexptsres['sqrtk2m2_set'] = np.sqrt(repeatedexptsres['k2_set']/repeatedexptsres['m2_set'])\n", + "for D in ['1D', '2D', '3D']:\n", + " repeatedexptsres['SQRTK1M1_' + D] = np.sqrt(repeatedexptsres['K1_' + D]/repeatedexptsres['M1_' + D])\n", + " if not MONOMER:\n", + " repeatedexptsres['SQRTK2M2_' + D] = np.sqrt(repeatedexptsres['K2_' + D]/repeatedexptsres['M2_' + D])\n", + "\n", + "\n", + "repeatedexptsresmean = repeatedexptsres.mean(numeric_only=True) " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#sns.set_context('paper')\n", + "saving = True \n", + "\n", + "describeresonator(vals_set, MONOMER, forceboth, noiselevel)\n", + "figheight = 1.3\n", + "\n", + "if MONOMER:\n", + " shortkeysummary = ['m1','k1','b1', 'sqrtk1m1']\n", + "else:\n", + " shortkeysummary = ['m1', 'k1', 'sqrtk1m1', 'b1', 'm2', 'k2', 'sqrtk2m2', 'b2', 'k12']\n", + " \n", + "\n", + "if resonatorsystem == 12:\n", + " list_to_show = ['1D', '3D']\n", + " colors = [co1, co3]\n", + "elif resonatorsystem == 2:\n", + " list_to_show 
= ['1D', '2D']\n", + " colors = [co1, co2]\n", + "else:\n", + " list_to_show = ['1D', '2D', '3D']\n", + " colors = [co1, co2, co3]\n", + " \n", + "shortkeylist = list([])\n", + "shortkeylistset = list([])\n", + "for key in shortkeysummary:\n", + " for D in list_to_show:\n", + " shortkeylist.append(key.upper()+ '_' + D)\n", + " shortkeylistset.append(key.lower() + '_set')\n", + "\n", + " \n", + "#paper-style box and whisker figure about the error.\n", + "set_format()\n", + "\n", + "# create dataframe of signed systematic errors\n", + "signederr = syserr(x_found = (repeatedexptsres[shortkeylist]), \n", + " x_set = np.array(repeatedexptsres[shortkeylistset]), \n", + " absval = False)\n", + "\n", + "\n", + "signederr.rename(columns={'M1_1D': '$m_{1,\\mathrm{1D}}$', 'M2_1D': '$m_{2,\\mathrm{1D}}$',\n", + " 'B1_1D': '$b_{1,\\mathrm{1D}}$', 'B2_1D': '$b_{2,\\mathrm{1D}}$',\n", + " 'K1_1D': '$k_{1,\\mathrm{1D}}$', 'K2_1D': '$k_{2,\\mathrm{1D}}$',\n", + " 'M1_2D': '$m_{1,\\mathrm{2D}}$', 'M2_2D': '$m_{2,\\mathrm{2D}}$',\n", + " 'B1_2D': '$b_{1,\\mathrm{2D}}$', 'B2_2D': '$b_{2,\\mathrm{2D}}$',\n", + " 'K1_2D': '$k_{1,\\mathrm{2D}}$', 'K2_2D': '$k_{2,\\mathrm{2D}}$',\n", + " 'M1_3D': '$m_{1,\\mathrm{3D}}$', 'M2_3D': '$m_{2,\\mathrm{3D}}$',\n", + " 'B1_3D': '$b_{1,\\mathrm{3D}}$', 'B2_3D': '$b_{2,\\mathrm{3D}}$',\n", + " 'K1_3D': '$k_{1,\\mathrm{3D}}$', 'K2_3D': '$k_{2,\\mathrm{3D}}$', \n", + " 'K12_3D': '$k_{12,\\mathrm{3D}}$', 'K12_1D': '$k_{12,\\mathrm{1D}}$', \n", + " 'K12_2D': '$k_{12,\\mathrm{2D}}$', \n", + " 'SQRTK1M1_1D': '$\\sqrt{k_1/m_1}_\\mathrm{,1D}$',\n", + " 'SQRTK1M1_2D': '$\\sqrt{k_1/m_1}_\\mathrm{,2D}$',\n", + " 'SQRTK1M1_3D': '$\\sqrt{k_1/m_1}_\\mathrm{,3D}$',\n", + " 'SQRTK2M2_1D': '$\\sqrt{k_2/m_2}_\\mathrm{,1D}$',\n", + " 'SQRTK2M2_2D': '$\\sqrt{k_2/m_2}_\\mathrm{,2D}$',\n", + " 'SQRTK2M2_3D': '$\\sqrt{k_2/m_2}_\\mathrm{,3D}$',\n", + " },\n", + " inplace=True)\n", + "\n", + "#display(signederr)\n", + "#fig, (ax1,ax2) = plt.subplots(2,1, figsize = (3.5,3.5))\n", + "\n", + "if MONOMER:\n", " boxwhiskerfigsize = (figwidth/2,figheight)\n", "else:\n", " boxwhiskerfigsize = (figwidth*1,figheight)\n", @@ -1409,7 +1496,7 @@ " plt.xticks(rotation=60, ha='right');\n", " ax1.tick_params(axis = \"x\", left=True, bottom=False, pad = -2)\n", " ax1.tick_params(axis='y',length=3)\n", - " plt.ylabel('$({p_i}-{p_{i,set}})/{p_{i,set}}$ (%)');\n", + " plt.ylabel('$\\Delta p_j/p_{j,\\mathrm{in}}$ (%)');\n", " #plt.ylabel(r'$\\frac{{p_i}-{p_{i,set}}}{p_{i,set}} \\cdot 100\\%$');\n", " sns.despine(ax = ax1, bottom = True)\n", " plt.tight_layout()\n", @@ -1423,22 +1510,23 @@ " fig, ax2 = plt.subplots(1,1, figsize = (figwidth/2,figheight), dpi=150)\n", " plt.sca(ax2)\n", " description = 'avgsyserr%'\n", - " dimension = list_to_show\n", - " for D in dimension:\n", + " for i in range(len(list_to_show)):\n", + " D = list_to_show[i]\n", " key = description + '_' + D\n", " #sns.histplot(repeatedexptsres[key],kde=False, stat=\"density\", linewidth=0, label=key, )\n", - " plt.hist(repeatedexptsres[key], bins=20,histtype = 'step', label = D);\n", + " plt.hist(repeatedexptsres[key], bins=20,histtype = 'step', label = D, color = colors[i]);\n", " text_color_legend()\n", " plt.xlabel('Average err (%)')\n", " plt.ylabel('Occurrences')\n", " ax2.set_yticks([])\n", " sns.despine(ax=ax2, left = True)\n", - "\n", + " #plt.xlim(xmax = 0.06)\n", "plt.tight_layout()\n", "if saving:\n", - " datestr = datestring()\n", + " #datestr = datestring()\n", " savename = \"sys\" + str(resonatorsystem) + ','+ \"probdist,\" + 
datestr\n", " savefigure(savename)\n", + "plt.show()\n", "\n", "#sns.set_context('talk')" ] @@ -1446,9 +1534,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "print('noiselevel:', noiselevel)\n", @@ -1460,30 +1546,34 @@ "fig, (ax1, ax2, ax3) = plt.subplots(3,1, figsize = (figwidth,figwidth))#, gridspec_kw={'hspace': 0}, sharex = 'all')\n", "\n", "if MONOMER:\n", - " Xkey = '1-expt_A1_rsqrd_'\n", + " #Xkey = '1-expt_A1_rsqrd_'\n", + " #xlab = '$1-R_A^2$'\n", + " Xkey = '1-avg_expt_cartes_rsqrd_'\n", + " xlab = '$1-R_\\mathrm{cart}^2$'\n", "else:\n", " Xkey = '1-expt_ampavg_rsqrd_'\n", + " xlab = '$1-R_\\mathrm{A}^2$'\n", " \n", "symb = '.'\n", "\n", "plt.sca(ax1)\n", "plt.loglog(repeatedexptsres[Xkey + '1D'], repeatedexptsres['avgsyserr%_1D'], symb, alpha = .08, label='1D')\n", "#plt.title('1D');\n", - "plt.xlabel('$1-R^2$')\n", + "plt.xlabel(xlab)\n", "plt.ylabel('syserr (%)');\n", "plt.legend()\n", " \n", "plt.sca(ax2)\n", "plt.loglog(repeatedexptsres[Xkey + '2D'], repeatedexptsres['avgsyserr%_2D'], symb, alpha = .08, label='2D')\n", "#plt.title(' 2D');\n", - "plt.xlabel('$1-R^2$')\n", + "plt.xlabel(xlab)\n", "plt.ylabel('syserr (%)');\n", "plt.legend()\n", "\n", "plt.sca(ax3)\n", "plt.loglog(repeatedexptsres[Xkey + '3D'], repeatedexptsres['avgsyserr%_3D'], symb, alpha = .08, label='3D')\n", "#plt.title('3D');\n", - "plt.xlabel('$1-R^2$')\n", + "plt.xlabel(xlab)\n", "plt.ylabel('syserr (%)');\n", "\n", "plt.suptitle('$R^2$ is useful for predicting syserr\\nbut not dimension')\n", @@ -1494,14 +1584,14 @@ "\n", "fig, ax = plt.subplots(1,1, figsize = (figwidth/2,figheight), gridspec_kw={'hspace': 0}, sharex = 'all', dpi=150)\n", "for D in list_to_show:\n", - " plt.loglog(repeatedexptsres[Xkey +D], repeatedexptsres['avgsyserr%_'+ D], symb, markersize=1, alpha = .08, label=D)\n", + " plt.loglog(repeatedexptsres[Xkey +D], repeatedexptsres['avgsyserr%_'+ D], symb, markersize=2, alpha = .08, label=D)\n", " #plt.loglog(repeatedexptsres[Xkey +D][::5], repeatedexptsres['avgsyserr%_'+ D][::5], symb, alpha = .08, label=D)\n", " #plt.loglog(repeatedexptsresmean[Xkey +D], repeatedexptsresmean['avgsyserr%_'+ D] )\n", "#plt.title('1D');\n", - "plt.xlabel('$1-R^2$')\n", + "plt.xlabel(xlab)\n", "#plt.xlim(xmax = 10**-6)\n", "#plt.legend()\n", - "plt.ylabel('err (%)');\n", + "plt.ylabel('Avg err (%)');\n", "if resonatorsystem == 2:\n", " plt.xticks([1e-6,1e-7])\n", "if False:\n", @@ -1519,7 +1609,7 @@ " \n", "\n", " \n", - "display(len(repeatedexptsres.columns)) # 200 -> 142 distributions" + "display('Number of items measured:', len(repeatedexptsres.columns)) # 200 -> 142 distributions" ] }, { @@ -1527,7 +1617,60 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "saving = False\n", + "\n", + "fig, ax = plt.subplots(1,1, figsize = (figwidth/2,figheight), gridspec_kw={'hspace': 0}, sharex = 'all', dpi=150)\n", + "for D in list_to_show:\n", + " plt.loglog(repeatedexptsres[Xkey +D], (repeatedexptsres['avgsyserr%_'+ D])**2, symb, markersize=2, alpha = .08, label=D)\n", + " #plt.loglog(repeatedexptsres[Xkey +D][::5], repeatedexptsres['avgsyserr%_'+ D][::5], symb, alpha = .08, label=D)\n", + " #plt.loglog(repeatedexptsresmean[Xkey +D], repeatedexptsresmean['avgsyserr%_'+ D] )\n", + "#plt.title('1D');\n", + "plt.xlabel(xlab)\n", + "#plt.xlim(xmax = 10**-6)\n", + "#plt.legend()\n", + "plt.ylabel('[Avg err (%)]$^2$');\n", + "if resonatorsystem == 2:\n", + " plt.xticks([1e-6,1e-7])\n", + "if 
False:\n", + " locmaj = mpl.ticker.LogLocator(numticks=2)\n", + " #ax.yaxis.set_major_locator(locmaj)\n", + " ax.xaxis.set_major_locator(locmaj)\n", + "ax.tick_params(axis='x', which='minor', bottom=True)\n", + "ax.tick_params(axis='y', which='minor', left=True)\n", + "#plt.axis('equal');\n", + "plt.tight_layout()\n", + "if saving:\n", + " datestr = datestring()\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"esqd,Rsqd,manypts,\" + datestr\n", + " savefigure(savename)\n", + " \n", + " \n", + "fig, ax = plt.subplots(1,1, figsize = (figwidth/2,figheight), gridspec_kw={'hspace': 0}, sharex = 'all', dpi=150)\n", + "for D in list_to_show:\n", + " plt.loglog((repeatedexptsres[Xkey +D])**(1/2), (repeatedexptsres['avgsyserr%_'+ D]), symb, markersize=2, alpha = .08, label=D)\n", + " #plt.loglog(repeatedexptsres[Xkey +D][::5], repeatedexptsres['avgsyserr%_'+ D][::5], symb, alpha = .08, label=D)\n", + " #plt.loglog(repeatedexptsresmean[Xkey +D], repeatedexptsresmean['avgsyserr%_'+ D] )\n", + "#plt.title('1D');\n", + "plt.xlabel('sqrt' + xlab)\n", + "#plt.xlim(xmax = 10**-6)\n", + "#plt.legend()\n", + "plt.ylabel('Avg err (%)');\n", + "#if resonatorsystem == 2:\n", + "# plt.xticks([1e-6,1e-7])\n", + "if False:\n", + " locmaj = mpl.ticker.LogLocator(numticks=2)\n", + " #ax.yaxis.set_major_locator(locmaj)\n", + " ax.xaxis.set_major_locator(locmaj)\n", + "ax.tick_params(axis='x', which='minor', bottom=True)\n", + "ax.tick_params(axis='y', which='minor', left=True)\n", + "#plt.axis('equal');\n", + "plt.tight_layout()\n", + "if saving:\n", + " datestr = datestring()\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"R,manypts,\" + datestr\n", + " savefigure(savename)" + ] }, { "cell_type": "code", @@ -1574,9 +1717,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "plotgrid = ((int(math.ceil((len(keylist)+1)/5))),5)\n", @@ -1669,21 +1810,6 @@ "\n" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\"\"\"syserrlist = [key for key in keylist if 'syserr' in key]\n", - "\n", - "syserrresults = repeatedexptsres[syserrlist] # Do I want violin plots?\n", - "\n", - "sns.violinplot(x=syserrlist,y=syserrresults ,\n", - " fontsize=7, rot=90)\n", - " \"\"\";" - ] - }, { "cell_type": "code", "execution_count": null, @@ -1724,7 +1850,7 @@ " shortkeylist = flatten([[key.upper() + '_1D', key.upper() + '_2D'] for key in shortkeysummary])\n", " shortkeylistset = flatten([[key.lower() + '_set']*2 for key in shortkeysummary])\n", " \n", - "#***aiming to make a publishable box and whisker figure about the error.\n", + "# box and whisker figure about the error.\n", "\n", "# create dataframe of signed systematic errors\n", "signederr = syserr(x_found = (repeatedexptsres[shortkeylist]), \n", @@ -1750,6 +1876,7 @@ "ax.boxplot(abs(signederr), notch=True, \n", " vert=None, patch_artist=None, widths=None, meanline = True,\n", " labels=signederr.columns); \n", + "plt.xticks(rotation=90);\n", "plt.ylabel('abs$(({p_i}-{p_{i,set}})/{p_{i,set}}) \\cdot 100\\%$');\n", "\n", "\n", @@ -1791,15 +1918,6 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": false - }, - "outputs": [], "source": [ "fig, axs = plt.subplots(plotgrid[0], plotgrid[1], figsize = figsizefull)\n", "\n", @@ -1821,7 +1939,8 @@ "print('Showing ', count, ' plots')\n", "printtime(count, before, 
after)\n", "print('Some of these are the folded normal (half normal) distribution')\n", - "plt.tight_layout()" + "plt.tight_layout()\n", + "plt.show()" ] }, { @@ -1855,9 +1974,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "if MONOMER:\n", @@ -2035,6 +2152,20 @@ " pass" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import importlib\n", + "import sim_series_of_experiments\n", + "\n", + "# Make changes to the module code\n", + "\n", + "importlib.reload(sim_series_of_experiments)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -2044,7 +2175,8 @@ "outputs": [], "source": [ "\"\"\" Vary the number of measurement frequencies / vary num p / vary nump \"\"\"\n", - " \n", + "from sim_series_of_experiments import vary_num_p_with_fixed_freqdiff\n", + " \n", "W1 = approx_width(k = k1_set, m = m1_set, b=b1_set)\n", "if MONOMER:\n", " W = W1\n", @@ -2059,31 +2191,94 @@ "# Ran 100 times in 7.121 sec\n", "# Ran 100 times in 78.661 sec with verbose = True (only counts the first repeat).\n", "# Ran 100 times in 786.946 sec with verbose = False\n", - "repeats = 100\n", - "verbose = False # if False, still shows one graph for each dimension\n", - "freqdiff = round(W/6,4)\n", + "repeats = 99\n", + "#repeats = 1\n", + "verbose = False # if False, still shows 2 graphs for each dimension\n", + "freqdiff = round(W/10,4)\n", "print('freqdiff:', freqdiff)\n", + "\n", + "if MONOMER:\n", + " overlay = True\n", + "else:\n", + " overlay = False\n", + " \n", + "figsizeoverride1 = None # default\n", + "figsizeoverride2 = None # default\n", + "if resonatorsystem == -3: # Monomer: set width, height\n", + " # spectra amplitude & phase\n", + " figsizeoverride1 = (2.1258, 1.4)\n", + " # complex plot\n", + " figsizeoverride2 = (figwidth/2, 1.4)\n", + "elif resonatorsystem == 2: # Monomer: set width, height\n", + " # spectra amplitude & phase\n", + " figsizeoverride1 = (2.1258, 1.3)\n", + " # complex plot\n", + " figsizeoverride2 = (figwidth/2, 1.3)\n", + "elif resonatorsystem == 10: # dimer\n", + " # spectra amplitude & phase\n", + " figsizeoverride1 = (figwidth, 1.45) #1.864736842105263)\n", + " # complex plot\n", + " figsizeoverride2 = (figwidth, 1.48)\n", + "\n", + "\n", + "if resonatorsystem == 11:\n", + " use_R2_only=True\n", + "else:\n", + " use_R2_only=False\n", + "\n", "before = time()\n", - "for i in range(1): # don't do repeats at this level.\n", - " thisres = vary_num_p_with_fixed_freqdiff( vals_set, noiselevel, \n", - " MONOMER, forceboth,reslist,\n", + "for i in range(1): # don't do repeats at this level. 
***\n", + " thisres, plot_info_1D = vary_num_p_with_fixed_freqdiff( vals_set, noiselevel, \n", + " MONOMER, forceboth,reslist = reslist,\n", " minfreq=minfreq, maxfreq = maxfreq,\n", - " verbose = verbose, just_res1 = True, \n", - " max_num_p=max_num_p, reslist = reslist,\n", + " verbose = verbose, just_res1 = False, \n", + " max_num_p=max_num_p, \n", " freqdiff = freqdiff,\n", " n=n, # number of frequencies for R^2\n", - " noiselevel= 1, repeats = repeats,\n", + " repeats = repeats, \n", + " overlay = overlay, saving = saving,\n", + " context = 'paper', resonatorsystem = resonatorsystem,\n", + " use_R2_only=use_R2_only,\n", + " figsizeoverride1 = figsizeoverride1, figsizeoverride2 = figsizeoverride2,\n", " recalculate_randomness = False)\n", " verbose = False\n", " try:\n", " resultsvarynump = resultsvarynump.append(thisres, ignore_index=True)\n", " except:\n", " resultsvarynump = thisres\n", + " \n", + "datestr = datestring()\n", + "resultsvarynump.to_csv(os.path.join(savefolder,\n", + " datestr + \"resultsvarynump.csv\"));\n", + "resultsvarynump.to_pickle(os.path.join(savefolder,\n", + " datestr + 'resultsvarynump.pkl'))\n", + "print('Saved: ' + os.path.join(savefolder,\n", + " datestr + 'resultsvarynump.csv'))\n", + " \n", "after = time()\n", "printtime(repeats, before, after) \n", "display(resultsvarynump.transpose())\n", "\n", + "[plot_info_1D_drive,R1_amp,R1_phase,R2_amp,R2_phase, plot_info_1D_df, K1, K2, K12, B1, B2, FD, M1, M2, plot_info_1D_vals_set, \n", + " plot_info_1D_MONOMER, plot_info_1D_forceboth, plot_info_1D_labelcounts, plot_info_1D_overlay,\n", + " _, _, _, plot_info_1D_demo,\n", + " _, show_set,\n", + " figsizeoverride1, figsizeoverride2] = plot_info_1D\n", + "Z1 = R1_amp * np.exp(R1_phase *1j)\n", + "if not MONOMER:\n", + " Z2 = R2_amp * np.exp(R2_phase *1j)\n", + "\n", + "\"\"\"\n", + "in simulated_experiment.py:\n", + " plot_info_1D = [drive,R1_amp,R1_phase,R2_amp,R2_phase, df, K1, K2, K12, B1, B2, FD, M1, M2, vals_set, \n", + " MONOMER, forceboth, labelcounts, overlay,\n", + " context, saving, '1D', demo,\n", + " resonatorsystem, show_set,\n", + " figsizeoverride1, figsizeoverride2]\n", + "\"\"\"\n", + "\n", "resultsvarynumpmean = resultsvarynump.groupby(by=['num frequency points'],as_index=False).mean()\n", + "datestr = datestring()\n", "\n", "verbose = False\n", "\n", @@ -2098,7 +2293,286 @@ " n = 100, # number of frequencies for R^2\n", " freqdiff = .1,just_res1 = False, repeats = 100,\n", " verbose = False,recalculate_randomness=True ):\n", - "\"\"\"" + "\"\"\";" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "resultsvarynump['num frequency points'].nunique()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "len(resultsvarynump)/ resultsvarynump['num frequency points'].nunique()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "len(resultsvarynump)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "print('standard deviation')\n", + "resultsvarynump.groupby(by=['num frequency points'],as_index=False)['avgsyserr%_3D'].std()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "print('standard error')\n", + "resultsvarynump.groupby(by=['num frequency points'],as_index=False)['avgsyserr%_3D'].std() / 
np.sqrt(len(resultsvarynump)/ resultsvarynump['num frequency points'].nunique())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "resultsvarynumpmean[['num frequency points','avgsyserr%_1D', 'avgsyserr%_2D','avgsyserr%_3D']]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "resultsvarynumpmean[['avgsyserr%_2D']].min()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plt.plot(resultsvarynumpmean['num frequency points'][0:11],resultsvarynumpmean[['avgsyserr%_1D', 'avgsyserr%_2D','avgsyserr%_3D']][0:11])\n", + "#plt.gca().ylims(ymax=100)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "resultsvarynumporig = resultsvarynump\n", + "resultsvarynump = resultsvarynumporig.copy()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "desiredfreqs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plot_info_1D_df[0:2 ]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#plotcomplex(Z2, plot_info_1D_drive)\n", + "saving = True\n", + "show_set = True\n", + "labelcounts = True\n", + "bigcircle = 23\n", + "if not MONOMER:\n", + " #figsize = (1.5, 1.3)\n", + " #figsize = (1.7, 1.4)\n", + " figsize = (1.76, 1.5)\n", + "\n", + " plt.figure(figsize = figsize, dpi=600)\n", + " \n", + " if show_set:\n", + " # subtle grey line\n", + " plt.plot(realamp1(morefrequencies, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, \n", + " 0,MONOMER=MONOMER, forceboth=forceboth,), \n", + " imamp1(morefrequencies, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, \n", + " 0,MONOMER=MONOMER, forceboth=forceboth,), \n", + " color='gray', alpha = .5, lw = 0.5, zorder = 1)\n", + "\n", + " # axes\n", + " plt.axvline(0, color = 'k', linestyle='solid', linewidth = .5, zorder = 3)\n", + " plt.axhline(0, color = 'k', linestyle='solid', linewidth = .5, zorder = 4)\n", + " sc = plt.scatter(np.real(Z1), np.imag(Z1), c = plot_info_1D_drive, s=10, \n", + " cmap = 'rainbow', vmin=0, zorder = 2) # option 3: s=4.\n", + " cbar = plt.colorbar(sc)\n", + " cbar.outline.set_visible(False)\n", + " ax = plt.gca()\n", + "\n", + " # axes labels\n", + " ax.set_xlabel('$\\mathrm{Re}(Z)$ (m)')\n", + " ax.set_ylabel('$\\mathrm{Im}(Z)$ (m)')\n", + " ax.axis('equal');\n", + " \"\"\" plt.gcf().canvas.draw() # draw so I can get xlim and ylim.\n", + " ymin, ymax = ax.get_ylim()\n", + " xmin, xmax = ax.get_xlim()\"\"\"\n", + " ax6 = plt.gca()\n", + "\n", + " # plus signs\n", + " ax6.scatter(np.real(plot_info_1D_df.R1AmpCom), np.imag(plot_info_1D_df.R1AmpCom), \n", + " marker = '+', color = 'w', lw = 0.5, s = 5,\n", + " #s=5, facecolors='none', edgecolors='k', lw = 0.5, # option 3\n", + " #s=1, facecolors='w', edgecolors='k', lw = 0.5, \n", + " label=\"points for analysis\", zorder = 7) \n", + " \n", + " \n", + " # black circles\n", + " ax6.scatter(np.real(plot_info_1D_df.R1AmpCom[0:2 ]), np.imag(plot_info_1D_df.R1AmpCom[0: 2]), \n", + " s=bigcircle, facecolors='none', edgecolors='k', label=\"points for analysis\", zorder = 6)\n", + " \n", + " # black dashed line\n", + " ax6.plot(realamp1(morefrequencies, K1, K2, K12, B1, B2, FD, M1, M2, 0,forceboth=forceboth, 
MONOMER=MONOMER), \n", + " imamp1(morefrequencies, K1, K2, K12, B1, B2, FD, M1, M2, 0,forceboth=forceboth, MONOMER=MONOMER), \n", + " '--', color='black', alpha = 1, lw = 0.7, zorder = 5)\n", + " if labelcounts: # this doesn't work\n", + " for i in range(0,len(plot_info_1D_df)//4,2):\n", + " plt.annotate(text=str(i+1), \n", + " xy=(np.real(plot_info_1D_df.R1AmpCom[i]), \n", + " np.imag(plot_info_1D_df.R1AmpCom[i])),\n", + " xytext = (np.real(plot_info_1D_df.R1AmpCom[i])+.07,\n", + " np.imag(plot_info_1D_df.R1AmpCom[i]) - .07) )\n", + " \n", + " plt.xlabel('Re($Z_1$) (m)')\n", + " plt.ylabel('Im($Z_1$) (m)')\n", + " #plt.xlim((-0.11, 0.10))\n", + " #plt.ylim((-.02, .18))\n", + " \n", + " plt.tight_layout()\n", + " if saving:\n", + " datestr = datestring()\n", + " filename = 'sys' + str(resonatorsystem) + ',' + datestr + 'spectrumZ1_1D_zoomin' \n", + " savefigure(filename)\n", + " plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "## plotcomplex(Z2, plot_info_1D_drive)\n", + "saving = True\n", + "show_set = True\n", + "labelcounts = True\n", + "bigcircle = 23\n", + "if not MONOMER:\n", + " #figsize = (2.1, 1.7715)\n", + " #figsize = (1.8, 1.4)\n", + " figsize = (1.76, 1.5)\n", + " \n", + " plt.figure(figsize = figsize, dpi=600)\n", + " \n", + " if show_set:\n", + " # subtle grey line\n", + " plt.plot(realamp2(morefrequencies, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, \n", + " 0,MONOMER=MONOMER, forceboth=forceboth,), \n", + " imamp2(morefrequencies, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, \n", + " 0,MONOMER=MONOMER, forceboth=forceboth,), \n", + " color='gray', alpha = .5, lw = 0.5, zorder = 1)\n", + "\n", + " # axes\n", + " plt.axvline(0, color = 'k', linestyle='solid', linewidth = .5, zorder = 3)\n", + " plt.axhline(0, color = 'k', linestyle='solid', linewidth = .5, zorder = 4)\n", + " sc = plt.scatter(np.real(Z2), np.imag(Z2), c = plot_info_1D_drive, s=10, \n", + " cmap = 'rainbow', zorder = 2, vmin=0) # option 3: s=4.\n", + " cbar = plt.colorbar(sc)\n", + " cbar.outline.set_visible(False)\n", + " ax = plt.gca()\n", + "\n", + " # axes labels\n", + " ax.set_xlabel('$\\mathrm{Re}(Z)$ (m)')\n", + " ax.set_ylabel('$\\mathrm{Im}(Z)$ (m)')\n", + " ax.axis('equal');\n", + " \"\"\" plt.gcf().canvas.draw() # draw so I can get xlim and ylim.\n", + " ymin, ymax = ax.get_ylim()\n", + " xmin, xmax = ax.get_xlim()\"\"\"\n", + " ax6 = plt.gca()\n", + "\n", + " # plus signs\n", + " ax6.scatter(np.real(plot_info_1D_df.R2AmpCom), np.imag(plot_info_1D_df.R2AmpCom), \n", + " marker = '+', color = 'w', lw = 0.5, s = 5,\n", + " #s=5, facecolors='none', edgecolors='k', lw = 0.5, # option 3\n", + " #s=1, facecolors='w', edgecolors='k', lw = 0.5, \n", + " label=\"points for analysis\", zorder = 7) \n", + " \n", + " \n", + " # black circles\n", + " ax6.scatter(np.real(plot_info_1D_df.R2AmpCom[0:2 ]), np.imag(plot_info_1D_df.R2AmpCom[0: 2]), \n", + " s=bigcircle, facecolors='none', edgecolors='k', label=\"points for analysis\", zorder = 6)\n", + " \n", + " # black dashed line\n", + " ax6.plot(realamp2(morefrequencies, K1, K2, K12, B1, B2, FD, M1, M2, 0,forceboth=forceboth,), \n", + " imamp2(morefrequencies, K1, K2, K12, B1, B2, FD, M1, M2, 0,forceboth=forceboth,), \n", + " '--', color='black', alpha = 1, lw = 0.7, zorder = 5)\n", + " if labelcounts: # this doesn't work\n", + " for i in range(0,len(plot_info_1D_df)//4,2):\n", + " plt.annotate(text=str(i+1), \n", + " 
xy=(np.real(plot_info_1D_df.R2AmpCom[i]), \n", + " np.imag(plot_info_1D_df.R2AmpCom[i])),\n", + " xytext = (np.real(plot_info_1D_df.R2AmpCom[i])+.05,\n", + " np.imag(plot_info_1D_df.R2AmpCom[i]) - .02) )\n", + " \n", + " plt.xlabel('Re($Z_2$) (m)')\n", + " plt.ylabel('Im($Z_2$) (m)')\n", + " #plt.xlim((-0.11, 0.10))\n", + " #plt.ylim((-.02, .18))\n", + " \n", + " plt.tight_layout()\n", + " if saving:\n", + " filename = 'sys' + str(resonatorsystem) + ',' + datestr + 'spectrumZ2_1D_zoomin' \n", + " savefigure(filename)\n", + " plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "resultsvarynumpmean.SNR_R1_f1[0]" ] }, { @@ -2110,6 +2584,15 @@ "describeresonator(vals_set=vals_set, MONOMER=MONOMER, forceboth=forceboth, noiselevel=noiselevel)" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "resultsvarynump, resultsvarynumpmean = calc_error_interval(resultsvarynump, resultsvarynumpmean, groupby='num frequency points', fractionofdata = .95)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -2117,35 +2600,165 @@ "outputs": [], "source": [ "print('Noiselevel: ' + str(noiselevel))\n", - "\n", "symb = '.' # plotting style\n", + "lw = 0.5\n", "co1 = 'C0'\n", "co2 = 'C1'\n", "co3 = 'C2'\n", + "#saving = False\n", "\n", - "plt.figure()\n", + "set_format()\n", + "reps = int(len(resultsvarynump) / len(resultsvarynumpmean))\n", + "\n", + "figsize = (figwidth, 1.48)\n", + "if resonatorsystem == -3: # Monomer:\n", + " figsize = (2.8, 1.4)\n", + "\n", + "plt.figure(figsize=figsize)\n", "#plt.plot(resultsvarynump['num frequency points'],resultsvarynump['avgsyserr%_3D'], symb, alpha = .1, color = co3 )\n", "#plt.plot(resultsvarynump['num frequency points'],resultsvarynump['avgsyserr%_2D'], symb, alpha = .1, color = co2)\n", "plt.plot(resultsvarynump['num frequency points'],resultsvarynump['avgsyserr%_1D'], symb, alpha = .1, color = co1)\n", "#plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['avgsyserr%_3D'], label='3D', color = co3)\n", "#plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['avgsyserr%_2D'], label='2D', color = co2)\n", "plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['avgsyserr%_1D'], label='1D', color = co1)\n", - "plt.legend()\n", + "text_color_legend()\n", "#plt.gca().set_yscale('log')\n", - "plt.xlabel('num frequency points')\n", + "plt.xlabel('num frequency points');\n", "plt.ylabel('Avg err (%)')\n", + "plt.tight_layout()\n", + "if saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"numpvsE,1D,\" + datestr + ', noise'+ str(noiselevel)\n", + " savefigure(savename)\n", + "plt.show()\n", "\n", - "plt.figure()\n", + "plt.figure(figsize=figsize)\n", "plt.plot(resultsvarynump['num frequency points'],resultsvarynump['avgsyserr%_3D'], symb, alpha = .1, color = co3 )\n", "plt.plot(resultsvarynump['num frequency points'],resultsvarynump['avgsyserr%_2D'], symb, alpha = .1, color = co2)\n", "plt.plot(resultsvarynump['num frequency points'],resultsvarynump['avgsyserr%_1D'], symb, alpha = .1, color = co1)\n", "plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['avgsyserr%_3D'], label='3D', color = co3)\n", "plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['avgsyserr%_2D'], label='2D', color = co2)\n", "plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['avgsyserr%_1D'], label='1D', color = co1)\n", - 
"plt.legend()\n", + "text_color_legend()\n", + "plt.xlim(xmin=0)\n", "plt.gca().set_yscale('log')\n", - "plt.xlabel('num frequency points')\n", + "#plt.xlabel('num frequency points')\n", + "plt.xlabel('number of frequency points');\n", + "plt.ylabel('Avg err (%)')\n", + "plt.tight_layout()\n", + "if saving:\n", + " datestr = datestring()\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"numpvsE,log,\" + datestr + ', noise'+ str(noiselevel)\n", + " savefigure(savename)\n", + " resultsvarynump[['num frequency points','avgsyserr%_1D','avgsyserr%_2D','avgsyserr%_3D']].to_csv(savename + '.csv')\n", + "plt.show()\n", + "\n", + "# ***\n", + "plt.figure(figsize=figsize)\n", + "axa = plt.gca()\n", + "dimensions = ['3D', '2D', '1D']\n", + "colors = [co3, co2, co1]\n", + "X = resultsvarynumpmean['num frequency points']\n", + "for i in range(3):\n", + " Yhigh = resultsvarynumpmean['E_upper_' + dimensions[i]]\n", + " Ylow = resultsvarynumpmean['E_lower_' + dimensions[i]] \n", + " plt.plot(X, Yhigh, color = colors[i], alpha = .3, linewidth=.3)\n", + " plt.plot(X, Ylow, color = colors[i], alpha = .3, linewidth=.3)\n", + " axa.fill_between(X, Ylow, Yhigh, color = colors[i], alpha=.2)\n", + "plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['avgsyserr%_3D'], label='3D', color = co3)\n", + "plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['avgsyserr%_2D'], label='2D', color = co2)\n", + "plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['avgsyserr%_1D'], label='1D', color = co1)\n", + "#text_color_legend()\n", + "plt.xlim(xmin=0)\n", + "plt.yscale('log')\n", + "#plt.xlabel('num frequency points')\n", + "plt.xlabel('number of frequency points');\n", "plt.ylabel('Avg err (%)')\n", + "plt.tight_layout()\n", + "if saving:\n", + " datestr = datestring()\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"numpvsE,log,cleaned,\" + datestr + ', noise'+ str(noiselevel)\n", + " savefigure(savename)\n", + "plt.show()\n", + "\n", + "plt.figure(figsize = figsize)\n", + "x = resultsvarynump['num frequency points']\n", + "sns.violinplot(x=x, y=resultsvarynump['log avgsyserr%_3D'], \n", + " positions=x.unique(), \n", + " color = co3,\n", + " saturation = .5,\n", + " lw = 2,\n", + " inner = None,\n", + " label='2D', fontsize=7, rot=0 )\n", + "sns.violinplot(x=x, y=resultsvarynump['log avgsyserr%_2D'], \n", + " positions=x.unique(), \n", + " color = co2,\n", + " saturation = .5,\n", + " lw = 0.1,\n", + " inner = None,\n", + " label='2D', fontsize=7, rot=0 )\n", + "sns.violinplot(x=x, y=resultsvarynump['log avgsyserr%_1D'], \n", + " positions=x.unique(), \n", + " color = co1,\n", + " saturation = .5,\n", + " lw = 0.3,\n", + " inner = None,\n", + " label='1D', fontsize=7, rot=0)\n", + "ax = plt.gca()\n", + "plt.setp(ax.collections, alpha=.7)\n", + "for i in range(max_num_p*3-3):\n", + " ax.collections[i].set_linewidth(.1)\n", + "plt.plot(resultsvarynumpmean['num frequency points']-2,resultsvarynumpmean['log avgsyserr%_3D'], lw = lw, color = co3)\n", + "plt.plot(resultsvarynumpmean['num frequency points']-2,resultsvarynumpmean['log avgsyserr%_2D'], lw = lw, color = co2)\n", + "plt.plot(resultsvarynumpmean['num frequency points']-2,resultsvarynumpmean['log avgsyserr%_1D'], lw = lw, color = co1)\n", + "plt.plot(resultsvarynumpmean['num frequency points']-2,resultsvarynumpmean['log avgsyserr%_3D'], '.', ms = 2, color = co3)\n", + "plt.plot(resultsvarynumpmean['num frequency points']-2,resultsvarynumpmean['log avgsyserr%_2D'], '.', ms = 2, color = 
co2)\n", + "plt.plot(resultsvarynumpmean['num frequency points']-2,resultsvarynumpmean['log avgsyserr%_1D'], '.', ms = 2, color = co1)\n", + "plt.ylabel('Avg err (%)')\n", + "plt.xlim(xmin=-2)\n", + "xt = list(range(-2,max_num_p-1,5))\n", + "xt = xt + [2-2]\n", + "plt.xticks(xt);\n", + "yt,_ = plt.yticks()\n", + "if MONOMER:\n", + " yt = yt[1:-1]\n", + "elif resonatorsystem == 11:\n", + " yt = range(-3,7,1)\n", + " #ytminor = np.arange(-3,4,.1)\n", + " #plt.yticks(ytminor, [10**y for y in ytminor], axis = 'minor',)\n", + "print(yt)\n", + "#plt.gca().Axes.set_ylabels([10**y for y in yt]) # undo the log.\n", + "plt.yticks(yt,[10**y for y in yt] );\n", + "plt.yticks([], minor=True)\n", + "\n", + "#plt.ticklabel_format(axis='y', style='sci', ) # AttributeError: This method only works with the ScalarFormatter\n", + "#plt.legend()\n", + "plt.tight_layout()\n", + "if saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"numpvsE,log,violin,\" + datestr + ', noise'+ str(noiselevel)\n", + " savefigure(savename)\n", + "plt.show()\n", + "\n", + "\"\"\"plt.figure(figsize = figsize)\n", + "resultsvarynump.boxplot(column = 'log avgsyserr%_1D', by = 'num frequency points', grid=False, fontsize=7, \n", + " #positions =resultsvarynoiselevel['log meanSNR_R1.unique(),widths=widths, \n", + " #color = 'k',\n", + " flierprops={'marker': '.', 'markersize': 1, 'markerfacecolor': 'k', 'alpha': .1},\n", + " showmeans = True,\n", + " manage_ticks = True,\n", + " figsize=figsize);\n", + "plt.xticks(list(range(-1,max_num_p,5)), rotation= 0)\n", + "#plt.yscale('log')\n", + "\n", + "resultsvarynump.boxplot(column = 'log avgsyserr%_2D', by = 'num frequency points', grid=False, fontsize=7, \n", + " #positions =resultsvarynoiselevel['log meanSNR_R1.unique(),widths=widths, \n", + " #color = 'k',\n", + " flierprops={'marker': '.', 'markersize': 1, 'markerfacecolor': 'k', 'alpha': .1},\n", + " showmeans = True,\n", + " manage_ticks = True,\n", + " figsize=figsize);\n", + "plt.xticks(list(range(-1,max_num_p,5)), rotation= 0)\n", + "#plt.yscale('log')\"\"\"\n", + "\n", "\n", "plt.figure()\n", "plt.plot(resultsvarynump['num frequency points'],resultsvarynump['K1syserr%_3D'], symb, alpha = .3 , color = co3)\n", @@ -2154,7 +2767,7 @@ "plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['K1syserr%_3D'], label='3D', color = co3)\n", "plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['K1syserr%_2D'], label='2D', color = co2)\n", "plt.plot(resultsvarynumpmean['num frequency points'],resultsvarynumpmean['K1syserr%_1D'], label='1D', color = co1)\n", - "plt.legend()\n", + "text_color_legend()\n", "plt.gca().set_yscale('log')\n", "plt.xlabel('num frequency points')\n", "plt.ylabel('k1 syserr (%)')\n", @@ -2162,7 +2775,7 @@ "plt.figure()\n", "#plt.plot(resultsvarynump['R1Ampsyserr%mean(priv)'],resultsvarynump['K1syserr%_2D'], symb, alpha = .3 , label='2D')\n", "plt.plot(resultsvarynump['R1Ampsyserr%mean(priv)'],resultsvarynump['K1syserr%_1D'] , symb, alpha = .3, label='1D')\n", - "plt.legend()\n", + "text_color_legend()\n", "plt.gca().set_yscale('log')\n", "plt.xlabel('R1 Amp syserr mean (priv) (%)')\n", "plt.ylabel('k1 syserr (%)')\n", @@ -2170,12 +2783,20 @@ "plt.figure()\n", "#plt.plot(resultsvarynump['R1phasediffmean(priv)'],resultsvarynump['K1syserr%_2D'], symb, alpha = .3 , label='2D')\n", "plt.plot(resultsvarynump['R1phasediffmean(priv)'],resultsvarynump['K1syserr%_1D'], symb, alpha = .3, label='1D')\n", - "plt.legend()\n", + "text_color_legend()\n", 
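Reviewer note: the violin cell above plots the logged error columns and then restores readable tick labels with `plt.yticks(yt, [10**y for y in yt])`. A minimal standalone illustration of that relabeling trick (the data here are fabricated):

```python
import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
err = 10.0 ** rng.normal(loc=0.0, scale=1.5, size=200)  # fake errors spanning decades

plt.figure(figsize=(3, 2))
plt.plot(np.log10(err), '.', ms=2)        # plot log10 of the data on a linear axis
yt = np.arange(-4, 5)                     # tick positions in log10 units
plt.yticks(yt, [10.0**y for y in yt])     # relabel ticks with the un-logged values
plt.xlabel('simulation index')
plt.ylabel('Avg err (%)')
plt.tight_layout()
plt.show()
```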
"plt.gca().set_yscale('log')\n", - "plt.xlabel('R1 phase diff mean (privileged)')\n", + "plt.xlabel('R1 phase diff mean (privileged)');\n", "plt.ylabel('k1 syserr (%)')\n", "\n", "plt.figure()\n", + "#plt.plot(resultsvarynump['R1phasediffmean(priv)'],resultsvarynump['K1syserr%_2D'], symb, alpha = .3 , label='2D')\n", + "plt.plot(resultsvarynump['R1phasediffmean(priv)'],resultsvarynump['avgsyserr%_1D'], symb, alpha = .3, label='1D')\n", + "text_color_legend()\n", + "plt.gca().set_yscale('log')\n", + "plt.xlabel('R1 phase diff mean (privileged)');\n", + "plt.ylabel('avgsyserr (%)')\n", + "\n", + "plt.figure()\n", "plt.plot(resultsvarynump['meanSNR_R1'],resultsvarynump['avgsyserr%_3D'], symb, color = co3, alpha = .5)# , label = '3D')\n", "plt.plot(resultsvarynump['meanSNR_R1'],resultsvarynump['avgsyserr%_2D'], symb,color = co2, alpha = .5)# , label = '2D')\n", "plt.plot(resultsvarynump['meanSNR_R1'],resultsvarynump['avgsyserr%_1D'], symb,color = co1, alpha = .5)#, label = '1D' )\n", @@ -2186,7 +2807,7 @@ "plt.gca().set_xscale('log')\n", "plt.xlabel('meanSNR_R1')\n", "plt.ylabel('Avg err (%)')\n", - "plt.legend()\n", + "text_color_legend()\n", "\n", "if not MONOMER:\n", " plt.figure()\n", @@ -2200,7 +2821,7 @@ "plt.plot(resultsvarynump['minSNR_R1'],resultsvarynump['avgsyserr%_1D'], symb, alpha = .5 )\n", "plt.gca().set_yscale('log')\n", "plt.gca().set_xscale('log')\n", - "plt.xlabel('minSNR_R1')\n", + "plt.xlabel('minSNR_R1');\n", "plt.ylabel('Avg err (%)');" ] }, @@ -2210,13 +2831,69 @@ "metadata": {}, "outputs": [], "source": [ - "datestr = datestring()\n", - "resultsvarynump.to_csv(os.path.join(savefolder,\n", - " datestr + \"resultsvarynump.csv\"));\n", - "resultsvarynump.to_pickle(os.path.join(savefolder,\n", - " datestr + 'resultsvarynump.pkl'))\n", - "print('Saved: ' + os.path.join(savefolder,\n", - " datestr + 'resultsvarynump.csv'))" + "figsize = (2,2)\n", + "plt.figure(figsize = figsize)\n", + "#plt.plot(resultsvarynump['R1Ampsyserr%mean(priv)'],resultsvarynump['K1syserr%_2D'], symb, alpha = .3 , label='2D')\n", + "plt.plot(resultsvarynump['R1Ampsyserr%mean(priv)'],resultsvarynump['avgsyserr%_1D'] , symb, alpha = .3, label='1D')\n", + "text_color_legend()\n", + "plt.gca().set_yscale('log')\n", + "plt.xlabel('R1 Amp syserr mean (priv) (%)')\n", + "plt.ylabel('avgsyserr (%)')\n", + "\n", + "plt.figure(figsize = figsize)\n", + "#plt.plot(resultsvarynump['R1Ampsyserr%mean(priv)'],resultsvarynump['K1syserr%_2D'], symb, alpha = .3 , label='2D')\n", + "plt.plot(resultsvarynump['R2Ampsyserr%mean(priv)'],resultsvarynump['avgsyserr%_1D'] , symb, alpha = .3, label='1D')\n", + "text_color_legend()\n", + "plt.gca().set_yscale('log')\n", + "plt.xlabel('R2 Amp syserr mean (priv) (%)')\n", + "plt.ylabel('avgsyserr (%)')\n", + "\n", + "plt.figure(figsize = figsize)\n", + "plt.plot(resultsvarynump['R1phasediffmean(priv)'],resultsvarynump['avgsyserr%_3D'], symb, color=co3, alpha = .3 , label='3D')\n", + "plt.plot(resultsvarynump['R1phasediffmean(priv)'],resultsvarynump['avgsyserr%_2D'], symb, color=co2, alpha = .3 , label='2D')\n", + "plt.plot(resultsvarynump['R1phasediffmean(priv)'],resultsvarynump['avgsyserr%_1D'], symb, color = co1, alpha = .3, label='1D')\n", + "text_color_legend()\n", + "plt.gca().set_yscale('log')\n", + "plt.xlabel('R1 phase diff mean (privileged)');\n", + "plt.ylabel('avgsyserr (%)')\n", + "\n", + "plt.figure(figsize = figsize)\n", + "#plt.plot(resultsvarynump['R1phasediffmean(priv)'],resultsvarynump['K1syserr%_2D'], symb, alpha = .3 , label='2D')\n", + 
"plt.plot(resultsvarynump['R2phasediffmean(priv)'],resultsvarynump['avgsyserr%_1D'], symb, alpha = .3, label='1D')\n", + "text_color_legend()\n", + "plt.gca().set_yscale('log')\n", + "plt.xlabel('R2 phase diff mean (privileged)');\n", + "plt.ylabel('avgsyserr (%)')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#np.logspace(-3,-2,10)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"\n", + "#with rc.Params()\n", + "ytminor = np.logspace(start=np.log10(1),stop = np.log10(2),num = 10)\n", + "print(yt)\n", + "print(ytminor)\n", + "print([np.log10(y) for y in ytminor])\n", + "print([10**y for y in ytminor])\n", + "plt.figure()\n", + "#plt.yticks(yt,[10**y for y in yt] );\n", + "plt.yticks(yt)\n", + "\n", + "plt.yticks( ytminor, minor = True)\n", + "\"\"\"" ] }, { @@ -2228,6 +2905,13 @@ "stophere # Next: varynoiselevel()" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, { "cell_type": "code", "execution_count": null, @@ -2294,6 +2978,8 @@ " return resultsdf\n", "\n", "noises = np.logspace(-5,3,100)\n", + "if resonatorsystem == 15:\n", + " noises = np.logspace(-9,3,100)\n", "\n", "# Ran 50 times in 33.037 sec\n", "# Ran 80 times in 90.253 sec on desktop 21214\n", @@ -2311,28 +2997,36 @@ "printtime(repeats, before, after) \n", "display(resultsvarynoiselevel.transpose())\n", "\n", - "resultsvarynoiselevelmean = resultsvarynoiselevel.groupby(by=['noiselevel'], ).mean()\n", - "resultsvarynoiselevelmean[resultsvarynoiselevelmean.index.name] = resultsvarynoiselevelmean.index\n", + "resultsvarynoiselevelmean = resultsvarynoiselevel.groupby(by=['noiselevel'],as_index=False ).mean()\n", "\n", - "# initialize 95% confidence interval columns\n", + "# initialize 95% of data columns\n", "for column in ['E_lower_1D', 'E_upper_1D','E_lower_2D', 'E_upper_2D','E_lower_3D', 'E_upper_3D']:\n", " resultsvarynoiselevelmean[column] = np.nan\n", "\n", "dimensions = ['1D', '2D', '3D']\n", " \n", "for noise in noises:\n", - " for D in dimensions:\n", + " for D in dimensions: # ASE stands for average systematic err\n", " #plt.hist(resultsvarynoiselevel[resultsvarynoiselevel['noiselevel']== noise]['avgsyserr%_1D'])\n", " ASE = resultsvarynoiselevel[resultsvarynoiselevel['noiselevel']== noise]['avgsyserr%_' + D]\n", " ASE = np.sort(ASE)\n", " halfalpha = (1 - .95)/2\n", - " ## literally select the 95% confidence interval by tossing out the top 2.5% and the bottom 2.5% \n", + " ## literally select interval for the 95% of the data by tossing out the top 2.5% and the bottom 2.5% \n", " ## I could do a weighted average to work better with selecting the top 2.5% and bottom 2.5%\n", " ## But perhaps this is good enough for an estimate. 
It's ideal if I do 80 measurements.\n", " lowerbound = np.mean([ASE[int(np.floor(halfalpha*len(ASE)))], ASE[int(np.ceil(halfalpha*len(ASE)))]])\n", " upperbound = np.mean([ASE[-int(np.floor(halfalpha*len(ASE))+1)],ASE[-int(np.ceil(halfalpha*len(ASE))+1)]])\n", " resultsvarynoiselevelmean.loc[resultsvarynoiselevelmean['noiselevel']== noise,'E_lower_'+ D] = lowerbound\n", - " resultsvarynoiselevelmean.loc[resultsvarynoiselevelmean['noiselevel']== noise,'E_upper_' + D] = upperbound" + " resultsvarynoiselevelmean.loc[resultsvarynoiselevelmean['noiselevel']== noise,'E_upper_' + D] = upperbound\n", + "\n", + "if saving:\n", + " datestr = datestring()\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ str(repeats) + \"sims_per_sigma,\" + datestr + ', varynoise'\n", + " resultsvarynoiselevel.to_csv(savename + '.csv')\n", + " print(\"Saved:\", savename + '.csv')\n", + " savename = \"sys\" + str(resonatorsystem) + ',logmean_of_'+ str(repeats) + \"sims_per_sigma,\" + datestr + ', varynoise'\n", + " resultsvarynoiselevelmean.to_csv(savename + '.csv')\n", + " print(\"Saved:\", savename + '.csv')" ] }, { @@ -2377,12 +3071,124 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, + "outputs": [], + "source": [ + "def powlaw(x, C, m): #****\n", + " return C * x**(m)\n", + "\n", + "# perhaps a power law with slope 1 is better called a \n", + "# \"linear fit\" or a \"proportional fit\"\n", + "def linear(x, b,m):\n", + " return m * x + b\n", + "\n", + "# truncated power law from https://www.nature.com/articles/srep08898\n", + "def truncpow(t,C,m,tau):\n", + " return(C * np.exp(t/(-tau)) * t**(m))\n", + "\n", + "Ds = ['1D', '2D', '3D']\n", + "if MONOMER:\n", + " Rs = ['R1']\n", + "else:\n", + " Rs = ['R1', 'R2']\n", + "\n", + "for D in Ds:\n", + " for R in Rs:\n", + " xdata = resultsvarynoiselevelmean['log meanSNR_' + R]\n", + " ydata = resultsvarynoiselevelmean['log avgsyserr%_' + D]\n", + "\n", + "\n", + " \"\"\"fitparampowone, covpowone = curve_fit(powlawslopeone, xdata = xdata, ydata = ydata, \n", + " p0 = 1)#(fitparampow[0]))\n", + " powonefit = powlawslopeone(xdata,fitparampowone[0])\n", + " plt.plot(xdata,powonefit, label='power law slope 1', color='grey');\n", + " print ('\\nPower law with slope fixed at 1:')\n", + " print ( 'C = ' + str(fitparampowone[0]) + ' ± ' + str(np.sqrt(covpowone[0,0])))\n", + " print ('logarithmic slope m = 1')\"\"\"\n", + "\n", + " plt.figure()\n", + " plt.scatter(xdata,ydata, label= D + ',' + R)\n", + " \n", + " fitparampow, covpow = curve_fit(linear, xdata = xdata, ydata = ydata, p0 = (1, 1))\n", + " print('fitparampow:', fitparampow)\n", + " linearfit = linear(xdata,fitparampow[0],fitparampow[1])\n", + " plt.plot(xdata,linearfit, label='log-log linear fit', color='k');\n", + "\n", + " \"\"\"\n", + " fitparamtrunc, covtrunc = curve_fit(truncpow, xdata = xdata, ydata = ydata, \n", + " p0 = (fitparampow[0], fitparampow[1],1))\n", + " trucpowfit = truncpow(xdata,fitparamtrunc[0],fitparamtrunc[1], fitparamtrunc[2])\n", + " #plt.plot(xdata,trucpowfit, label='truncated power law fit', color='r');\n", + " print('fitparamtrunc:', fitparamtrunc)\n", + " \"\"\"\n", + " plt.legend()\n", + "\n", + "\n", + "\n", + "\n", + "\n", + " plt.figure(figsize=(1,6))\n", + " plt.imshow(abs(covpow), cmap=\"gray\", interpolation=\"nearest\", vmin=0)\n", + " plt.colorbar()\n", + " plt.title('Covariance matrix, power law fit, absolute values')\n", + " \n", + " \"\"\"\n", + " plt.figure(figsize=(1,6))\n", + " plt.imshow(abs(covtrunc), 
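Reviewer note: the varynoiselevel cell above builds the "95% of the data" band by sorting each group and discarding the top and bottom 2.5% by index. `np.quantile`/`groupby().quantile()` gives an equivalent shortcut with interpolation built in. A minimal sketch, assuming a long-format results frame with a 'noiselevel' column and an 'avgsyserr%_1D' column like the one above (the toy data are invented):

```python
import numpy as np
import pandas as pd

def data_interval(df, groupcol, valuecol, fraction=0.95):
    """Per-group central interval containing `fraction` of the samples."""
    halfalpha = (1.0 - fraction) / 2.0
    lo = df.groupby(groupcol)[valuecol].quantile(halfalpha)
    hi = df.groupby(groupcol)[valuecol].quantile(1.0 - halfalpha)
    return pd.DataFrame({'lower': lo, 'upper': hi}).reset_index()

# toy example
rng = np.random.default_rng(1)
toy = pd.DataFrame({
    'noiselevel': np.repeat([0.1, 1.0, 10.0], 200),
    'avgsyserr%_1D': rng.lognormal(mean=0.0, sigma=1.0, size=600),
})
print(data_interval(toy, 'noiselevel', 'avgsyserr%_1D'))
```

This sidesteps the floor/ceil index arithmetic, at the cost of quantile interpolation when the group size is not a multiple of 40.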
cmap=\"gray\", interpolation=\"nearest\", vmin=0)\n", + " plt.colorbar()\n", + " plt.title('Covariance matrix, truncated powlaw fit, absolute values')\n", + " \"\"\"\n", + " \n", + " print(\"\\nIt's ok to use the uncertainties below as long as there aren't strong off-diagonal values.\")\n", + " print('But there are, unfortunately.')\n", + " print ('\\nPower law, y=C*x^m:')\n", + " print ( 'C = ' + str(fitparampow[0]) + ' ± ' + str(np.sqrt(covpow[0,0])))\n", + " print ('logarithmic slope m = ' + str(fitparampow[1]) + ' ± ' + str(np.sqrt(covpow[1,1])))\n", + "\n", + " \"\"\"\n", + " print ('\\nTruncated Power law:')\n", + " print ( 'C = ' + str(fitparamtrunc[0]) + ' ± ' + str(np.sqrt(covtrunc[0,0])))\n", + " print ('logarithmic slope m = ' + str(fitparamtrunc[1]) + ' ± ' + str(np.sqrt(covtrunc[1,1])))\n", + " print ('constant tau = ' + str(fitparamtrunc[2]) + ' ± ' + str(np.sqrt(covtrunc[2,2])))\n", + " \"\"\"\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plt.figure(figsize=(1.5,1.5))\n", + "\n", + "for D in Ds:\n", + " for R in Rs:\n", + " xdata = 1/resultsvarynoiselevelmean['meanSNR_' + R]\n", + " ydata = (resultsvarynoiselevelmean['avgsyserr%_' + D])\n", + " #plt.figure()\n", + " #plt.scatter(x=resultsvarynoiselevel['meanSNR_' + R], y=100/resultsvarynoiselevel['avgsyserr%_' + D], \n", + " # marker = '.' , alpha = .05)\n", + " plt.plot(xdata,ydata, label = R + ',' + D )\n", + " plt.xlabel('1/SNR for resonator')\n", + " plt.ylabel(\"Avg err (%)\")\n", + " #plt.title(R + ',' + D)\n", + "plt.legend(loc='center left', bbox_to_anchor=(1, 0.5))\n", + "\n", + "if resonatorsystem == 2:\n", + " plt.ylim(ymin = 0, ymax = 2)\n", + "\n", + "print('resonatorsystem:', resonatorsystem)\n", + "describeresonator(vals_set, MONOMER, forceboth, noiselevel)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, "outputs": [], "source": [ - "import matplotlib as mpl #$$$$$\n", + "import matplotlib as mpl \n", "\n", "alpha = .01\n", "plotlog = True\n", @@ -2481,11 +3287,14 @@ " \n", " #sns.set_context('paper')\n", " ## cleaned figures\n", - " figsize = (figwidth/2,figwidth/2)\n", " if resonatorsystem == 9:\n", " figsize = (2.7,2.8)\n", " elif resonatorsystem == 2:\n", " figsize = (1.4,figwidth/2) # width, height \n", + " elif resonatorsystem == 15:\n", + " figsize = (4,4)\n", + " else:\n", + " figsize = (figwidth/2,figwidth/2)\n", " plt.figure(figsize = figsize, dpi=150)\n", " #signal / resultsvarynoiselevelmean['stdev']\n", " axa = plt.gca()\n", @@ -2523,7 +3332,8 @@ " if plotlog:\n", " axb.set_xticks(10**logmeanSNRticks)\n", " axb.set_xlabel('Mean SNR for ' + R)\n", - " plt.ylim(ymax=100, ymin=1e-8)\n", + " if resonatorsystem == 11 or resonatorsystem == 110:\n", + " plt.ylim(ymax=100, ymin=1e-8)\n", " plt.tight_layout()\n", " if saving:\n", " savename = 'sys' + str(resonatorsystem) + 'err_vs_SNR_' + R + ',cleaned,'+ \\\n", @@ -2599,10 +3409,12 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "scrolled": true + }, "outputs": [], "source": [ - "# varying param / varyparam / vary param vary\n", + "# varying param / varyparam / vary param vary / vary one param / vary 1 param\n", "\n", "def vary_param(paramname = 'm2', param_list = np.linspace(0.1, 60, num = 100), move_peaks = True, \n", " verboseall = False, repeats = 1, vals_set = vals_set, \n", @@ -2679,7 +3491,7 @@ "\n", " ## find peaks and choose frequency locations that match\n", " if move_peaks: \n", - " for i in 
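Reviewer note: the fit cells above warn that the one-sigma uncertainties from `curve_fit` are only trustworthy when the off-diagonal covariance terms are weak. One quick way to quantify that is to convert the covariance matrix to a correlation matrix; entries near ±1 mean the fit parameters are strongly degenerate. A minimal sketch (`covpow` below is an invented 2x2 stand-in for any covariance returned by `curve_fit`):

```python
import numpy as np

# example covariance matrix, shaped like the output of scipy.optimize.curve_fit
covpow = np.array([[0.04, -0.018],
                   [-0.018, 0.01]])

sigmas = np.sqrt(np.diag(covpow))             # one-sigma parameter uncertainties
corr = covpow / np.outer(sigmas, sigmas)      # correlation matrix, diagonal == 1

print('uncertainties:', sigmas)
print('parameter correlation:', corr[0, 1])   # near +/-1 => strongly correlated parameters
```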
range(2): # this is actually not redundant, even with iterations.\n", + " for i in range(3): # this is actually not redundant, even with iterations.\n", " morefrequencies = makemorefrequencies(minfreq= minfreq,maxfreq= maxfreq,\n", " res1 = res1, res2 = res2, # use last res1,2\n", " vals_set = vals_set, MONOMER=MONOMER, \n", @@ -2694,6 +3506,7 @@ " morefrequencies=morefrequencies,\n", " unique = True, veryunique = True, iterations = 3, numtoreturn = 2, \n", " verboseplot = False, verbose=False,returnoptions=True ) \n", + " reslist - np.sort(reslist)\n", " # I turned off verbose for res_freq_numeric\n", " drive = np.sort(np.unique(np.append(drive1,reslist)))\n", " morefrequencies = np.sort(np.unique(np.append(morefrequencies, drive)))\n", @@ -2736,13 +3549,17 @@ " # Ran 250 times in 9.627 sec on laptop\n", " # Ran 1000 times in 66.84 sec on laptop\n", " # Ran 25*100= 2500 times in 341.961 sec\n", - " maxparamvalue = 20\n", - " num_variations = 10\n", - " repeats = 25\n", - " paramname = 'm2'\n", - " noiselevel = .1\n", + " # Ran 10*25 = 250 times in 84.978 sec\n", + " # Ran 25*20 = 500 times in 87.478 sec\n", + " num_variations = 25\n", + " repeats = 20\n", + " minparamvalue = .1\n", + " maxparamvalue = 10\n", + " paramname = 'F'\n", + " #noiselevel = .1\n", " \n", - " param_list = np.linspace(0.1, maxparamvalue, num = num_variations)\n", + " param_list = np.linspace(minparamvalue, maxparamvalue, num = num_variations)\n", + " #param_list = np.linspace(maxparamvalue+.5, maxparamvalue * 2, num = num_variations)\n", " numberverbose = 2\n", "\n", " verboseindex = [int(x) for x in np.linspace(0, num_variations-1, numberverbose)]\n", @@ -2772,13 +3589,30 @@ "outputs": [], "source": [ "variedkey = paramname + '_set'\n", - "variedkeylabel = paramname + '$_\\mathrm{set}}$'\n", + "datestr = datestring()\n", + "saving = True\n", + "\n", + "def variedkeylabel(paramname):\n", + " if paramname == 'k1':\n", + " return '$k_{1,\\mathrm{set}}$ (N/m)'\n", + " if paramname == 'k2':\n", + " return '$k_{2,\\mathrm{set}}$ (N/m)'\n", + " if paramname == 'k12':\n", + " return '$k_{12,\\mathrm{set}}$ (N/m)'\n", + " if paramname == 'm2':\n", + " return '$m_{2,\\mathrm{set}}$ (N/m)'\n", + " if paramname == 'F' and not forceboth:\n", + " return '$F_{1,\\mathrm{set}}$ (N)'\n", + " else:\n", + " return paramname + '$_\\mathrm{set}}$'\n", + "\n", "\n", "try:\n", - " plt.plot(resultsdfvaryparam['Freq Method'], resultsdfvaryparam['avgsyserr%_1D'], '.')\n", + " plt.figure(figsize=(2,2))\n", + " plt.plot(resultsdfvaryparam['Freq Method'], resultsdfvaryparam['avgsyserr%_1D'], '.', alpha = .3)\n", " plt.xlabel('Freq Method')\n", - " plt.ylabel('Avg Syserr (%), 1D')\n", - " plt.figure()\n", + " plt.ylabel('Avg err (%), 1D')\n", + " plt.figure(figsize = (2,2))\n", " plt.scatter(x=resultsdfvaryparam[variedkey], y=resultsdfvaryparam['Freq1'], marker='.', c = resultsdfvaryparam['Freq Method'], cmap='tab10' )\n", " sc=plt.scatter(x=resultsdfvaryparam[variedkey], y=resultsdfvaryparam['Freq2'], marker='.', c = resultsdfvaryparam['Freq Method'], cmap = 'tab10')\n", " plt.xlabel(variedkey)\n", @@ -2787,15 +3621,93 @@ " cbar.outline.set_visible(False)\n", " cbar.set_label('Freq Method')\n", "except ValueError as e:\n", - " print(e)" + " print(e)\n", + "plt.show()\n", + "\n", + "log_SNR = False\n", + "if True: #***\n", + " fig, axs = plt.subplots(2,1, figsize=(figwidth/2, figwidth/2), sharex = 'all',\n", + " gridspec_kw={'hspace': 0, \n", + " 'height_ratios': [1, 3]})\n", + " plt.sca(axs[0])\n", + " if log_SNR:\n", + " vmin = 
np.log10(min(resultsdfvaryparam['SNR_R2_f1'].min(), resultsdfvaryparam['SNR_R2_f2'].min()))\n", + " vmax = np.log10(max(resultsdfvaryparam['SNR_R2_f1'].max(), resultsdfvaryparam['SNR_R2_f2'].max()))\n", + " c1 = np.log10(resultsdfvaryparam['SNR_R2_f1'])\n", + " c2 = np.log10(resultsdfvaryparam['SNR_R2_f2'])\n", + " else:\n", + " vmin = 0 #min(resultsdfvaryparam['SNR_R2_f1'].min(), resultsdfvaryparam['SNR_R2_f2'].min())/1000\n", + " vmax = max(resultsdfvaryparam['SNR_R2_f1'].max(), resultsdfvaryparam['SNR_R2_f2'].max())/1000\n", + " c1 = resultsdfvaryparam['SNR_R2_f1']/1000\n", + " c2 = resultsdfvaryparam['SNR_R2_f2']/1000\n", + " plt.scatter(resultsdfvaryparam[variedkey], resultsdfvaryparam['Freq1'] , c=c1 , \n", + " vmin=vmin,vmax=vmax,\n", + " marker='o', s=3, cmap = 'copper')\n", + " sc = plt.scatter(resultsdfvaryparam[variedkey], resultsdfvaryparam['Freq2'] , c= c2, #****\n", + " vmin=vmin,vmax=vmax,\n", + " marker='o', s=3, cmap = 'copper')\n", + " \"\"\"plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['Freq1'] , \n", + " symb, ms=1, color='k', label='$\\omega_a$', alpha=alpha)\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['Freq2'] , \n", + " symb, ms=1, color = 'k', label='$\\omega_b$', alpha=alpha)\"\"\"\n", + " #plt.legend()\n", + " #plt.title('Resonance frequencies')\n", + " plt.ylabel('$\\omega_\\mathrm{res}$ (rad/s)');\n", + " plt.xlabel(variedkeylabel(paramname));\n", + " \n", + " plt.sca(axs[1])\n", + " if paramname == 'm2':\n", + " plt.axvline(m1_set, color='k', lw= 0.5, alpha = .5 )\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['avgsyserr%_3D'], symb, ms = 1, color=co3, alpha=alpha)#, label='3D')\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['avgsyserr%_2D'], symb, ms=1, color=co2,alpha=alpha)#, label='2D')\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['avgsyserr%_1D'], symb, ms=1, color=co1,alpha=alpha)#, label='1D')\n", + " plt.plot(resultsdfvaryparammean[variedkey], 10**resultsdfvaryparammean['log avgsyserr%_3D'], color=co3, label='3D')\n", + " plt.plot(resultsdfvaryparammean[variedkey], 10**resultsdfvaryparammean['log avgsyserr%_2D'], color=co2, label='2D')\n", + " plt.plot(resultsdfvaryparammean[variedkey], 10**resultsdfvaryparammean['log avgsyserr%_1D'], color=co1, label='1D')\n", + "\n", + " #plt.plot(resultsdfvaryparam[variedkey],resultsdfvaryparam['rmssyserr%_2D'])\n", + " #plt.title('2D nullspace normalized by ' + normalizationpair)\n", + " plt.xlabel(variedkeylabel(paramname))\n", + " plt.ylabel('Avg err (%)');\n", + " plt.gca().set_yscale('log')\n", + " text_color_legend()\n", + " #plt.ylim(0,ymax=maxsyserr_to_plot)\n", + " \n", + " fig.subplots_adjust(right=0.8)\n", + " cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.7])\n", + " cbar = fig.colorbar(sc, cax=cbar_ax)\n", + " cbar.outline.set_visible(False)\n", + " if log_SNR:\n", + " cbar.set_label('log SNR R2')\n", + " else:\n", + " cbar.set_label('SNR R2 (x1000)')\n", + "\n", + " if saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"vary \" + paramname + ',' + datestr\n", + " savefigure(savename)\n", + " resultsdfvaryparam[[variedkey,'avgsyserr%_1D','avgsyserr%_2D','avgsyserr%_3D',\n", + " 'log avgsyserr%_1D','log avgsyserr%_2D','log avgsyserr%_3D',\n", + " 'SNR_R2_f1', 'SNR_R2_f2',\n", + " 'SNR_R1_f1', 'SNR_R1_f2'\n", + " ]].to_csv(savename + '.csv')\n", + " plt.show()\n", + " \n", + "print(len(resultsdfvaryparam), 'simulated experiments')\n", + "print(len(resultsdfvaryparammean), 'different ', 
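Reviewer note: the figure above stacks a narrow resonance-frequency panel over the error panel with `gridspec_kw={'hspace': 0, 'height_ratios': [1, 3]}` and parks the colorbar in its own axes via `fig.add_axes`, so it does not shrink either panel. A stripped-down sketch of that layout with synthetic curves (none of the notebook's columns are used):

```python
import numpy as np
import matplotlib.pyplot as plt

x = np.linspace(0.1, 10, 200)
freqs = np.sqrt(x)                  # stand-in for resonance frequency vs parameter
err = 1.0 / (0.1 + abs(x - 5.0))    # stand-in for an error curve vs parameter
snr = 1000 * x                      # stand-in for an SNR used only for coloring

fig, axs = plt.subplots(2, 1, figsize=(3.4, 3.4), sharex='all',
                        gridspec_kw={'hspace': 0, 'height_ratios': [1, 3]})

sc = axs[0].scatter(x, freqs, c=snr / 1000, s=3, cmap='copper')
axs[0].set_ylabel(r'$\omega_\mathrm{res}$ (rad/s)')

axs[1].semilogy(x, err)
axs[1].set_xlabel('varied parameter')
axs[1].set_ylabel('Avg err (%)')

fig.subplots_adjust(right=0.8)                    # leave room for the colorbar
cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.7])   # [left, bottom, width, height]
cbar = fig.colorbar(sc, cax=cbar_ax)
cbar.set_label('SNR (x1000)')
plt.show()
```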
paramname)\n", + "print(len(resultsdfvaryparam)/len(resultsdfvaryparammean), 'simulated experiments per each', paramname)\n" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, "outputs": [], "source": [ "describeresonator(vals_set, MONOMER,forceboth=forceboth, noiselevel=noiselevel)\n", @@ -2819,7 +3731,7 @@ " try:\n", " cbar = plt.colorbar(sc, ax = plt.gca())\n", " cbar.outline.set_visible(False)\n", - " cbar.set_label(variedkeylabel)\n", + " cbar.set_label(variedkeylabel(paramname))\n", " except AttributeError:\n", " pass\n", " except:\n", @@ -2836,30 +3748,30 @@ " try:\n", " cbar = plt.colorbar(sc,ax = plt.gca())\n", " cbar.outline.set_visible(False)\n", - " cbar.set_label(variedkeylabel)\n", + " cbar.set_label(variedkeylabel(paramname))\n", " except AttributeError:\n", " pass\n", " except:\n", " pass\n", "\n", " plt.figure()\n", - " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['Freq1'] , symb, color='k', label='$\\omega_1$', alpha=alpha)\n", - " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['Freq2'] , symb, color = 'k', label='$\\omega_2$', alpha=alpha)\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['Freq1'] , symb, color='k', label='$\\omega_a$', alpha=alpha)\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['Freq2'] , symb, color = 'k', label='$\\omega_b$', alpha=alpha)\n", " #plt.legend()\n", " plt.title('Resonance frequencies')\n", " plt.ylabel('Frequency (rad/s)');\n", - " plt.xlabel(variedkeylabel);\n", + " plt.xlabel(variedkeylabel(paramname));\n", "\n", " plt.figure()\n", - " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R1_phase_noiseless2']/np.pi, symb, label='$\\delta_1(\\omega_2)$', )\n", - " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R1_phase_noiseless1']/np.pi, symb, label='$\\delta_1(\\omega_1)$', )\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R1_phase_noiseless2']/np.pi, symb, label='$\\phi_1(\\omega_b)$', )\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R1_phase_noiseless1']/np.pi, symb, label='$\\phi_1(\\omega_a)$', )\n", " if not MONOMER:\n", - " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R2_phase_noiseless1']/np.pi, symb, label='$\\delta_2(\\omega_1)$', )\n", - " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R2_phase_noiseless2']/np.pi, symb, label='$\\delta_2(\\omega_2)$', )\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R2_phase_noiseless1']/np.pi, symb, label='$\\phi_2(\\omega_a)$', )\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R2_phase_noiseless2']/np.pi, symb, label='$\\phi_2(\\omega_b)$', )\n", " plt.axhline(-1/4)\n", " plt.legend()\n", - " plt.ylabel('$\\delta$ ($\\pi$)');\n", - " plt.xlabel(variedkeylabel);\n", + " plt.ylabel('$\\phi$ ($\\pi$)');\n", + " plt.xlabel(variedkeylabel(paramname));\n", " \n", " plt.figure()\n", " sc = plt.scatter(x=resultsdfvaryparam['1-avg_expt_cartes_rsqrd_1D'],y=resultsdfvaryparam['avgsyserr%_1D'], \n", @@ -2877,13 +3789,13 @@ "\n", "\n", " plt.figure()\n", - " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R1_amp_noiseless2'], symb, label='$A_1(\\omega_2)$', )\n", - " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R1_amp_noiseless1'], symb, label='$A_1(\\omega_1)$', )\n", + " plt.plot(resultsdfvaryparam[variedkey], 
resultsdfvaryparam['R1_amp_noiseless2'], symb, label='$A_1(\\omega_b)$', )\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R1_amp_noiseless1'], symb, label='$A_1(\\omega_a)$', )\n", " if not MONOMER:\n", - " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R2_amp_noiseless2'], symb, label='$A_2(\\omega_2)$', )\n", - " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R2_amp_noiseless1'], symb, label='$A_2(\\omega_1)$', )\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R2_amp_noiseless2'], symb, label='$A_2(\\omega_b)$', )\n", + " plt.plot(resultsdfvaryparam[variedkey], resultsdfvaryparam['R2_amp_noiseless1'], symb, label='$A_2(\\omega_a)$', )\n", " plt.ylabel('$A$ (arb. units)');\n", - " plt.xlabel(variedkeylabel);\n", + " plt.xlabel(variedkeylabel(paramname));\n", " plt.legend(loc='upper left', bbox_to_anchor=(1.05, 1.05), ncol=1,)\n", "\n", "\n", @@ -2911,7 +3823,7 @@ " #plt.ylim(0,25)\n", " plt.gca().set_yscale('log')\n", " plt.ylabel('Syserr (%)');\n", - " plt.xlabel(variedkeylabel);\n", + " plt.xlabel(variedkeylabel(paramname));\n", "\n", " plt.figure()\n", " if paramname == 'm2':\n", @@ -2925,7 +3837,7 @@ "\n", " #plt.plot(resultsdfvaryparam[variedkey],resultsdfvaryparam['rmssyserr%_2D'])\n", " #plt.title('2D nullspace normalized by ' + normalizationpair)\n", - " plt.xlabel(variedkeylabel)\n", + " plt.xlabel(variedkeylabel(paramname))\n", " plt.ylabel('Average syserr (%)');\n", " plt.gca().set_yscale('log')\n", " plt.legend()\n", @@ -2941,7 +3853,7 @@ " #plt.ylim(ymin=0)\n", " plt.gca().set_yscale('log')\n", " plt.legend()\n", - " plt.xlabel(variedkeylabel)\n", + " plt.xlabel(variedkeylabel(paramname))\n", " plt.ylabel('SNR');\n", "\n", " plt.figure();\n", @@ -2951,18 +3863,10 @@ " plt.plot(resultsdfvaryparam[variedkey],resultsdfvaryparam.R2_amp_meas1, symb, label=\"R2, f1\", alpha=alpha)\n", " plt.plot(resultsdfvaryparam[variedkey],resultsdfvaryparam.R2_amp_meas2, symb, label=\"R2, f2\", alpha=alpha)\n", " plt.legend()\n", - " plt.xlabel(variedkeylabel)\n", + " plt.xlabel(variedkeylabel(paramname))\n", " plt.ylabel('amplitude (arb. units)');\n", "\n", - " \"\"\"\n", - " # Make sure that SNRcalc is being used correctly.\n", - " plt.figure()\n", - " plt.plot(resultsdfvaryparam[variedkey],resultsdfvaryparam.R1_amp_meas1, label=\"R1, f1, measured\" )\n", - " plt.plot(resultsdfvaryparam[variedkey],resultsdfvaryparam.A1f1avg, label='R1, f1,average measured')\n", - " plt.legend()\n", - " plt.xlabel(variedkeylabel)\n", - " plt.ylabel('amplitude (arb. 
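Reviewer note: the diagnostic plots above show phases divided by np.pi (y-axis labeled '$\phi$ ($\pi$)') and amplitudes versus the varied parameter. For reference, a short sketch of getting amplitude and phase in units of pi from a complex response array (the Z values below are invented):

```python
import numpy as np

# complex response samples at a few drive frequencies (made-up values)
Z = np.array([0.05 - 0.02j, 0.01 - 0.09j, -0.03 - 0.01j])

amplitude = np.abs(Z)
phase_over_pi = np.angle(Z) / np.pi   # np.angle returns radians in (-pi, pi]

print('A      =', amplitude)
print('phi/pi =', phase_over_pi)
```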
units)');\n", - " \"\"\"\n", + "\n", "\n", " plt.figure();\n", " if len(resultsdfvaryparam) >400:\n", @@ -3003,7 +3907,7 @@ " try:\n", " cbar = plt.colorbar(sc);\n", " cbar.outline.set_visible(False);\n", - " cbar.set_label(variedkeylabel);\n", + " cbar.set_label(variedkeylabel(paramname));\n", " except AttributeError:\n", " pass\n", " plt.gca().axis('equal');\n", @@ -3054,7 +3958,8 @@ "metadata": {}, "outputs": [], "source": [ - "plt.figure()\n", + "figsize = (figwidth/2,1.3)\n", + "plt.figure(figsize = figsize)\n", "#plt.loglog(resultsdfvaryparam['1-avg_expt_cartes_rsqrd_3D'],\n", "# resultsdfvaryparam['avgsyserr%_3D'], symb , color=co3, alpha = alpha/2)\n", "#plt.loglog(resultsdfvaryparam['1-avg_expt_cartes_rsqrd_2D'],\n", @@ -3065,14 +3970,15 @@ "\n", "sc = plt.scatter(x=resultsdfvaryparam['1-avg_expt_cartes_rsqrd_1D'],y=resultsdfvaryparam['avgsyserr%_1D'], \n", " c = resultsdfvaryparam[variedkey],\n", - " marker = symb , alpha = alpha, cmap = 'rainbow')\n", + " marker = symb , s=1, alpha = alpha, cmap = 'rainbow')\n", "cbar = plt.colorbar(sc)\n", "cbar.outline.set_visible(False)\n", "cbar.set_label(variedkey)\n", "plt.gca().set_xscale('log')\n", "plt.gca().set_yscale('log')\n", "\n", - "plt.xlabel('1-avg_expt_cartes_rsqrd_1D')\n", + "#plt.xlabel('1-avg_expt_cartes_rsqrd_1D')\n", + "plt.xlabel('$1-R^2_\\mathrm{avg}$')\n", "plt.ylabel('avgsyserr%_1D')\n", "plt.axis('equal');\n", "plt.title('$1-R^2$ predicts syserr')\n", @@ -3096,23 +4002,23 @@ "ydata = resultsdfvaryparam['avgsyserr%_1D']\n", "\n", "\n", - "fitparampowone, covpowone = curve_fit(powlawslopeone, xdata = xdata, ydata = ydata, \n", + "\"\"\"fitparampowone, covpowone = curve_fit(powlawslopeone, xdata = xdata, ydata = ydata, \n", " p0 = 1)#(fitparampow[0]))\n", "powonefit = powlawslopeone(xdata,fitparampowone[0])\n", - "plt.plot(xdata,powonefit, label='power law slope 1', color='k');\n", + "plt.plot(xdata,powonefit, label='power law slope 1', color='grey');\n", "print ('\\nPower law with slope fixed at 1:')\n", "print ( 'C = ' + str(fitparampowone[0]) + ' ± ' + str(np.sqrt(covpowone[0,0])))\n", - "print ('logarithmic slope m = 1')\n", + "print ('logarithmic slope m = 1')\"\"\"\n", "\n", - "\"\"\"fitparampow, covpow = curve_fit(powlaw, xdata = xdata, ydata = ydata, p0 = (1, 1))\n", + "fitparampow, covpow = curve_fit(powlaw, xdata = xdata, ydata = ydata, p0 = (1, 1))\n", "print('fitparampow:', fitparampow)\n", "powlawfit = powlaw(xdata,fitparampow[0],fitparampow[1])\n", - "plt.plot(xdata,powlawfit, label='power law fit', color='grey');\n", + "plt.plot(xdata,powlawfit, label='power law fit', color='k');\n", "\n", "fitparamtrunc, covtrunc = curve_fit(truncpow, xdata = xdata, ydata = ydata, \n", " p0 = (fitparampow[0], fitparampow[1],1))\n", "trucpowfit = truncpow(xdata,fitparamtrunc[0],fitparamtrunc[1], fitparamtrunc[2])\n", - "plt.plot(xdata,trucpowfit, label='truncated power law fit', color='r');\n", + "#plt.plot(xdata,trucpowfit, label='truncated power law fit', color='r');\n", "print('fitparamtrunc:', fitparamtrunc)\n", "\n", "\n", @@ -3131,7 +4037,7 @@ "\n", "print(\"\\nIt's ok to use the uncertainties below as long as there aren't strong off-diagonal values.\")\n", "print('But there are, unfortunately.')\n", - "print ('\\nPower law:')\n", + "print ('\\nPower law, y=C*x^m:')\n", "print ( 'C = ' + str(fitparampow[0]) + ' ± ' + str(np.sqrt(covpow[0,0])))\n", "print ('logarithmic slope m = ' + str(fitparampow[1]) + ' ± ' + str(np.sqrt(covpow[1,1])))\n", "\n", @@ -3139,7 +4045,7 @@ "print ( 'C = ' + str(fitparamtrunc[0]) + ' ± ' 
+ str(np.sqrt(covtrunc[0,0])))\n", "print ('logarithmic slope m = ' + str(fitparamtrunc[1]) + ' ± ' + str(np.sqrt(covtrunc[1,1])))\n", "print ('constant tau = ' + str(fitparamtrunc[2]) + ' ± ' + str(np.sqrt(covtrunc[2,2])))\n", - "\"\"\";\n", + "\n", "\n" ] }, @@ -3260,9 +4166,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "maxsyserr_to_plot = 10\n", @@ -3322,14 +4226,14 @@ "plt.xlabel(variedkeylabel);\n", "\n", "plt.figure()\n", - "plt.plot(resultsdfk1[variedkey], resultsdfk1['R1_phase_noiseless2']/np.pi, symb, label='$\\delta_1(\\omega_2)$', alpha=alpha)\n", - "plt.plot(resultsdfk1[variedkey], resultsdfk1['R1_phase_noiseless1']/np.pi, symb, label='$\\delta_1(\\omega_1)$', alpha=alpha)\n", + "plt.plot(resultsdfk1[variedkey], resultsdfk1['R1_phase_noiseless2']/np.pi, symb, label='$\\phi_1(\\omega_2)$', alpha=alpha)\n", + "plt.plot(resultsdfk1[variedkey], resultsdfk1['R1_phase_noiseless1']/np.pi, symb, label='$\\phi_1(\\omega_1)$', alpha=alpha)\n", "if not MONOMER:\n", - " plt.plot(resultsdfk1[variedkey], resultsdfk1['R2_phase_noiseless1']/np.pi, symb, label='$\\delta_2(\\omega_1)$', alpha=alpha)\n", - " plt.plot(resultsdfk1[variedkey], resultsdfk1['R2_phase_noiseless2']/np.pi, symb, label='$\\delta_2(\\omega_2)$', alpha=alpha)\n", + " plt.plot(resultsdfk1[variedkey], resultsdfk1['R2_phase_noiseless1']/np.pi, symb, label='$\\phi_2(\\omega_1)$', alpha=alpha)\n", + " plt.plot(resultsdfk1[variedkey], resultsdfk1['R2_phase_noiseless2']/np.pi, symb, label='$\\phi_2(\\omega_2)$', alpha=alpha)\n", "plt.axhline(-1/4)\n", "plt.legend()\n", - "plt.ylabel('$\\delta$ ($\\pi$)');\n", + "plt.ylabel('$\\phi$ ($\\pi$)');\n", "plt.xlabel(variedkeylabel);\n", "\n", "\n", @@ -3407,16 +4311,7 @@ "plt.xlabel(variedkeylabel)\n", "plt.ylabel('amplitude (arb. units)');\n", "\n", - "\"\"\"\n", - "# Make sure that SNRcalc is being used correctly.\n", - "plt.figure()\n", - "plt.plot(resultsdfk1[variedkey],resultsdfk1.R1_amp_meas1, label=\"R1, f1, measured\" )\n", - "plt.plot(resultsdfk1[variedkey],resultsdfk1.A1f1avg, label='R1, f1,average measured')\n", - "plt.legend()\n", - "plt.xlabel(variedkeylabel)\n", - "plt.ylabel('amplitude (arb. 
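Reviewer note: in the 1-R² cell above the free power-law fit is now the active one (the fixed-slope-1 version is commented out), while the varynoiselevel cells fit a straight line to the already-logged columns. The two parameterizations are related by C = 10^b and the same slope m; a small comparison on synthetic data (the generating C and m are invented):

```python
import numpy as np
from scipy.optimize import curve_fit

def powlaw(x, C, m):
    return C * x**m

def linear(x, b, m):
    return m * x + b

rng = np.random.default_rng(2)
x = np.logspace(-3, 0, 60)
y = 0.5 * x**1.2 * rng.lognormal(sigma=0.1, size=x.size)   # noisy power law, C=0.5, m=1.2

# direct power-law fit
(p_C, p_m), _ = curve_fit(powlaw, x, y, p0=(1, 1))

# equivalent fit of a straight line in log-log space
(b_fit, m_fit), _ = curve_fit(linear, np.log10(x), np.log10(y), p0=(0, 1))

print('direct power-law fit:  C = %.3f, m = %.3f' % (p_C, p_m))
print('log-log linear fit:    C = %.3f, m = %.3f' % (10**b_fit, m_fit))
```

The two fits generally do not agree exactly: the direct fit is dominated by the largest-y points, while the log-space fit weights every decade equally, which is usually preferable when the error spans orders of magnitude.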
units)');\n", - "\"\"\"\n", - "\n", + "\n", "plt.figure()\n", "if len(resultsdfk1) >400:\n", " alpha = .4\n", @@ -3661,9 +4556,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "maxsyserr_to_plot = 10\n", @@ -3707,13 +4600,13 @@ "plt.xlabel('$k_{12, \\mathrm{set}}$ (N/m)');\n", "\n", "plt.figure()\n", - "plt.plot(resultsdfk12.k12_set, resultsdfk12['R1_phase_noiseless2']/np.pi, symb, label='$\\delta_1(\\omega_2)$', alpha=alpha)\n", - "plt.plot(resultsdfk12.k12_set, resultsdfk12['R1_phase_noiseless1']/np.pi, symb, label='$\\delta_1(\\omega_1)$', alpha=alpha)\n", - "plt.plot(resultsdfk12.k12_set, resultsdfk12['R2_phase_noiseless1']/np.pi, symb, label='$\\delta_2(\\omega_1)$', alpha=alpha)\n", - "plt.plot(resultsdfk12.k12_set, resultsdfk12['R2_phase_noiseless2']/np.pi, symb, label='$\\delta_2(\\omega_2)$', alpha=alpha)\n", + "plt.plot(resultsdfk12.k12_set, resultsdfk12['R1_phase_noiseless2']/np.pi, symb, label='$\\phi_1(\\omega_2)$', alpha=alpha)\n", + "plt.plot(resultsdfk12.k12_set, resultsdfk12['R1_phase_noiseless1']/np.pi, symb, label='$\\phi_1(\\omega_1)$', alpha=alpha)\n", + "plt.plot(resultsdfk12.k12_set, resultsdfk12['R2_phase_noiseless1']/np.pi, symb, label='$\\phi_2(\\omega_1)$', alpha=alpha)\n", + "plt.plot(resultsdfk12.k12_set, resultsdfk12['R2_phase_noiseless2']/np.pi, symb, label='$\\phi_2(\\omega_2)$', alpha=alpha)\n", "plt.axhline(-1/4)\n", "plt.legend()\n", - "plt.ylabel('$\\delta$ ($\\pi$)');\n", + "plt.ylabel('$\\phi$ ($\\pi$)');\n", "plt.xlabel('$k_{12, \\mathrm{set}}$ (N/m)');\n", "\n", "\n", @@ -3788,15 +4681,6 @@ "plt.xlabel('$k_{12, \\mathrm{set}}$ (N/m)')\n", "plt.ylabel('amplitude (arb. units)');\n", "\n", - "\"\"\"\n", - "# Make sure that SNRcalc is being used correctly.\n", - "plt.figure()\n", - "plt.plot(resultsdfk12.k12_set,resultsdfk12.R1_amp_meas1, label=\"R1, f1, measured\" )\n", - "plt.plot(resultsdfk12.k12_set,resultsdfk12.A1f1avg, label='R1, f1,average measured')\n", - "plt.legend()\n", - "plt.xlabel('$k_{12, \\mathrm{set}}$ (N/m)')\n", - "plt.ylabel('amplitude (arb. units)');\n", - "\"\"\"\n", "\n", "plt.figure()\n", "if len(resultsdfk12) >400:\n", @@ -4018,9 +4902,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "symb = '.'\n", @@ -4096,16 +4978,6 @@ " plt.xlabel('$k_{2, \\mathrm{set}}$ (N/m)')\n", " plt.ylabel('amplitude (arb. units)');\n", "\n", - " \"\"\"\n", - " # Make sure that SNRcalc is being used correctly.\n", - " plt.figure()\n", - " plt.plot(resultsdfk2['k2_set'],resultsdfk2['R1_amp_meas1, label=\"R1, f1, measured\" )\n", - " plt.plot(resultsdfk2['k2_set'],resultsdfk2['A1f1avg, label='R1, f1,average measured')\n", - " plt.legend()\n", - " plt.xlabel('$k_{2, \\mathrm{set}}$ (N/m)')\n", - " plt.ylabel('amplitude (arb. 
units)');\n", - " \"\"\"\n", - "\n", " plt.figure()\n", " if len(resultsdfk2) >400:\n", " alpha = .4\n", @@ -4151,7 +5023,7 @@ "metadata": {}, "outputs": [], "source": [ - "stophere" + "stophere # Sweep TWO frequencies / Sweep 2 freq / 2freq / Sweep two freq / sweep pair of frequencies / vary two freqs" ] }, { @@ -4173,13 +5045,6 @@ " pass" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, { "cell_type": "code", "execution_count": null, @@ -4192,7 +5057,7 @@ "\n", "def sweep_freq_pair(drive=drive, vals_set = vals_set, \n", " noiselevel = noiselevel, freq3 = None, MONOMER=MONOMER, repeats = 1,\n", - " verbose = verbose, forceboth = forceboth):\n", + " forceboth = forceboth):\n", "\n", " [m1_set, m2_set, b1_set, b2_set, k1_set, k2_set, k12_set, F_set] = read_params(vals_set, MONOMER)\n", " \n", @@ -4214,7 +5079,7 @@ "\n", " thisres = simulated_experiment(freqs, drive=drive,vals_set = vals_set, \n", " noiselevel=noiselevel, MONOMER=MONOMER, forceboth=forceboth,\n", - " repeats=repeats , verbose = verbose, noiseless_spectra=noiseless_spectra)\n", + " repeats=repeats , verbose = False, noiseless_spectra=noiseless_spectra)\n", " \n", " try: # repeated experiments results\n", " resultsdf = pd.concat([resultsdf,thisres], ignore_index=True)\n", @@ -4238,38 +5103,59 @@ " # 30 frequencies\n", " # Ran 1 times in 15.68 sec\"\"\"\n", "\n", - "# 30 is small. 200 is big.\n", - "numfreq = 30\n", - "repeats = 6\n", - "noiselevel = 1\n", + "if False:\n", + " # 30 is small. 200 is big.\n", + " \"\"\"\n", + " 200 frequencies\n", + " Ran 6 times in 11300.511 sec = 3.13 hours.\n", + " \"\"\"\n", + " \n", + " numfreq = 200\n", + " repeats = 6\n", + " noiselevel = 1\n", "\n", - "thisdrive, _ = create_drive_arrays(vals_set, MONOMER, forceboth, n=numfreq, \n", - " morefrequencies = morefrequencies, includefreqs = reslist,\n", - " minfreq = minfreq, maxfreq = maxfreq, \n", - " staywithinlims = False,\n", - " callmakemore = False,\n", - " verbose = verbose)\n", + " thisdrive, _ = create_drive_arrays(vals_set, MONOMER, forceboth, n=numfreq, \n", + " morefrequencies = morefrequencies, includefreqs = reslist,\n", + " minfreq = minfreq, maxfreq = maxfreq, \n", + " staywithinlims = False,\n", + " callmakemore = False,\n", + " verbose = verbose)\n", "\n", - "before = time()\n", + " before = time()\n", "\n", - "for i in range(1):\n", - " thisres = sweep_freq_pair(drive=thisdrive, vals_set = vals_set, noiselevel = noiselevel, freq3 = None, \n", - " MONOMER = MONOMER, forceboth=forceboth, repeats = repeats)\n", - " try:\n", - " resultsdfsweep2freqorig = pd.concat([resultsdfsweep2freqorig,thisres], ignore_index=True)\n", - " except:\n", - " resultsdfsweep2freqorig = thisres\n", - "after = time()\n", - "print(len(thisdrive), 'frequencies')\n", - "printtime(repeats, before, after)\n", - "# Ran 1 times in 6.624 sec\n", - "# Ran 1 times in 4.699 sec\n", - "# 30 frequencies Ran 1 times in 15.898 sec\n", - "# 231 frequencies Ran 1 times in 273.113 sec\n", - "# 291 frequencies Ran 1 times in 493.772 sec\n", - "# 33 frequencies Ran 6 times in 250.501 sec\n", + " for i in range(1):\n", + " thisres = sweep_freq_pair(drive=thisdrive, vals_set = vals_set, noiselevel = noiselevel, freq3 = None, \n", + " MONOMER = MONOMER, forceboth=forceboth, repeats = repeats)\n", + " try:\n", + " resultsdfsweep2freqorig = pd.concat([resultsdfsweep2freqorig,thisres], ignore_index=True)\n", + " except:\n", + " resultsdfsweep2freqorig = thisres\n", + " after = time()\n", + " 
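Reviewer note: the sweep cell below runs sweep_freq_pair over a drive-frequency array and the analysis afterwards drops the f1 == f2 diagonal with the `Difference != 0` mask. The pair enumeration presumably amounts to something like this sketch (the drive array here is a small stand-in):

```python
import numpy as np
from itertools import combinations_with_replacement

drive = np.linspace(0.5, 4.0, 6)   # stand-in drive-frequency array

# Every unordered pair (f1, f2), including f1 == f2; the diagonal pairs
# can be filtered out afterwards, as the notebook does with Difference != 0.
pairs = list(combinations_with_replacement(drive, 2))
print(len(pairs), 'pairs from', len(drive), 'frequencies')   # n*(n+1)/2 pairs

off_diagonal = [(f1, f2) for f1, f2 in pairs if f1 != f2]
print(len(off_diagonal), 'pairs with f1 != f2')
```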
print(len(thisdrive), 'frequencies')\n", + " printtime(repeats, before, after)\n", + " # Ran 1 times in 6.624 sec\n", + " # Ran 1 times in 4.699 sec\n", + " # 30 frequencies Ran 1 times in 15.898 sec\n", + " # 231 frequencies Ran 1 times in 273.113 sec\n", + " # 291 frequencies Ran 1 times in 493.772 sec\n", + " # 33 frequencies Ran 6 times in 250.501 sec\n", + " # 201 frequencies Ran 6 times in 4358.699 sec (72.644983333 hours)\n", + "\n", + " datestr = datestring()\n", + " resultsdfsweep2freqorig.to_csv(\"sys\" + str(resonatorsystem) + ',2freq,' + datestr + '.csv')\n", + "else:\n", + " if MONOMER:\n", + " saveddf = os.path.join(r'G:\\Shared drives\\Horowitz Lab Notes\\Horowitz, Viva - notes and files\\simulation_export',\n", + " 'sys-3,2freq,2022-12-29 20;03;50.csv') # MONOMER\n", + " resonatorsystem = -30\n", + " else:\n", + " saveddf = os.path.join(r'G:\\Shared drives\\Horowitz Lab Notes\\Horowitz, Viva - notes and files\\simulation_export',\n", + " 'sys11,2freq,2023-01-07 13;53;00.csv') # DIMER\n", + " resonatorsystem = 110 # the 0 means it was reloaded\n", + " resultsdfsweep2freqorig = pd.read_csv(saveddf)\n", + " print('Opened existing file:', saveddf)\n", "\n", - "resultsdfsweep2freqorigmean = resultsdfsweep2freqorig.groupby(by=['Freq1', 'Freq2'],as_index=False).mean()\n", + "resultsdfsweep2freqorigmean = resultsdfsweep2freqorig.groupby(by=['Freq1', 'Freq2'],as_index=False).mean(numeric_only=True)\n", "\n", "\n", "## remove diagonal parameters from resultsdf for the following plots.\n", @@ -4277,6 +5163,13 @@ "resultsdfmean = resultsdfsweep2freqorigmean[resultsdfsweep2freqorig.Difference != 0]" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, { "cell_type": "code", "execution_count": null, @@ -4289,9 +5182,39 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": true - }, + "metadata": {}, + "outputs": [], + "source": [ + "round(resultsdfmean.Freq1.min(),1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "round(resultsdfmean.Freq1.max(),1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "## reset resonances\n", + "for i in range(3):\n", + " res1,res2 = res_freq_numeric(vals_set=vals_set, MONOMER=MONOMER, forceboth=forceboth, includefreqs = reslist, \n", + " minfreq = minfreq, maxfreq = maxfreq,\n", + " verboseplot = False, verbose=False, iterations = 3,\n", + " numtoreturn=2)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, "outputs": [], "source": [ "describeresonator(vals_set = vals_set, MONOMER=MONOMER, noiselevel = noiselevel, forceboth = forceboth)\n", @@ -4327,13 +5250,13 @@ "plt.sca(ax4)\n", "SSgrid=resultsdfsweep2freqorigmean.pivot_table(\n", " index = 'Freq1', columns = 'Freq2', values = 'log avgsyserr%_1D').sort_index(axis = 0, ascending = False)\n", - "myheatmap(SSgrid, \"log average sys error\", vmax=vmax, cmap='magma_r'); \n", + "myheatmap(SSgrid, \"log average error, 1D (%)\", vmax=vmax, cmap='magma_r'); \n", "plt.title('1d')\n", "\n", "plt.sca(ax4b)\n", "SSgrid=resultsdfsweep2freqorigmean.pivot_table(\n", " index = 'Freq1', columns = 'Freq2', values = 'log maxsyserr%_1D').sort_index(axis = 0, ascending = False)\n", - "myheatmap(SSgrid, \"log max sys error\", vmax=vmax, cmap='magma_r'); \n", + "myheatmap(SSgrid, \"log max error, 1D (%)\", vmax=vmax, cmap='magma_r'); \n", "plt.title('1d')\n", "\n", 
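Reviewer note: the cell above averages the repeated runs with `groupby(by=['Freq1','Freq2'], as_index=False).mean(numeric_only=True)` (so the frequency pair stays as ordinary columns and non-numeric columns are skipped, which newer pandas would otherwise refuse to average), and the heatmap cells then reshape the result into a Freq1 x Freq2 grid with `pivot_table` before calling the local `myheatmap` helper. A compact sketch of that pipeline on fabricated data, using plain `plt.imshow` in place of `myheatmap`:

```python
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

rng = np.random.default_rng(3)
freqs = np.round(np.linspace(0.5, 4.0, 8), 2)

# long-format results: several repeats per (Freq1, Freq2) pair
long_df = pd.DataFrame([(f1, f2, rng.lognormal(sigma=1.0))
                        for f1 in freqs for f2 in freqs for _ in range(5)],
                       columns=['Freq1', 'Freq2', 'avgsyserr%_1D'])
long_df['log avgsyserr%_1D'] = np.log10(long_df['avgsyserr%_1D'])

# average the repeats
mean_df = long_df.groupby(by=['Freq1', 'Freq2'], as_index=False).mean(numeric_only=True)

# reshape to a Freq1 x Freq2 grid, largest Freq1 on the top row
grid = mean_df.pivot_table(index='Freq1', columns='Freq2',
                           values='log avgsyserr%_1D').sort_index(ascending=False)

plt.figure(figsize=(3, 2.5))
plt.imshow(grid, cmap='magma_r',
           extent=[freqs.min(), freqs.max(), freqs.min(), freqs.max()])
plt.colorbar(label='log average error (%)')
plt.xlabel(r'$\omega_b$ (rad/s)')
plt.ylabel(r'$\omega_a$ (rad/s)')
plt.tight_layout()
plt.show()
```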
"plt.sca(ax5)\n", @@ -4346,13 +5269,13 @@ "plt.sca(ax6)\n", "SSgrid=resultsdfsweep2freqorigmean.pivot_table(\n", " index = 'Freq1', columns = 'Freq2', values = 'log avgsyserr%_2D').sort_index(axis = 0, ascending = False)\n", - "myheatmap(SSgrid, \"log average sys error\", vmax=vmax, cmap='magma_r'); \n", + "myheatmap(SSgrid, \"log average error, 2D (%)\", vmax=vmax, cmap='magma_r'); \n", "plt.title('2d')\n", "\n", "plt.sca(ax6b)\n", "grid=resultsdfsweep2freqorigmean.pivot_table(\n", " index = 'Freq1', columns = 'Freq2', values = 'log maxsyserr%_2D').sort_index(axis = 0, ascending = False)\n", - "myheatmap(grid, \"log max sys error\", vmax=vmax, cmap='magma_r'); \n", + "myheatmap(grid, \"log max error, 2D (%)\", vmax=vmax, cmap='magma_r'); \n", "plt.title('2d')\n", "\n", " \n", @@ -4372,7 +5295,15 @@ " plt.sca(ax)\n", " ax.axis('equal');\n", " #plt.xticks([res1, res2])\n", - " plt.yticks([res1, res2])\n", + " if res1 == res2:\n", + " plt.yticks([round(resultsdfmean.Freq1.min(),1), round(res1,2), round(resultsdfmean.Freq1.max(),1)])\n", + " else:\n", + " try:\n", + " plt.yticks([res1, res2])\n", + " except:\n", + " pass\n", + "plt.tight_layout()\n", + "plt.show()\n", "\n", "fig, ((ax1, ax2, ax7), (ax3, ax4, ax4b), (ax5, ax6, ax6b)) = plt.subplots(3, 3, figsize=figsize)\n", "\n", @@ -4469,6 +5400,7 @@ " grid=resultsdfsweep2freqorigmean.pivot_table(\n", " index = 'R2_phase_noiseless1', columns = 'R2_phase_noiseless2', values = 'log avgsyserr%_1D').sort_index(axis = 0, ascending = False)\n", " myheatmap(grid, \"log avgsyserr%_1D\", cmap = 'magma_r')#, vmax = 2); \n", + " plt.axis('equal')\n", "\n", "maxsyserr_to_plot = 1\n", "\n", @@ -4503,6 +5435,8 @@ "plt.gca().set_xscale('log')\n", "plt.gca().set_yscale('log')\n", "plt.gca().axis('equal');\n", + "plt.tight_layout()\n", + "plt.show()\n", "\n", "plt.figure()\n", "if MONOMER:\n", @@ -4515,9 +5449,12 @@ "plt.gca().set_xscale('log')\n", "plt.gca().set_yscale('log')\n", "plt.gca().axis('equal');\n", + "plt.tight_layout()\n", + "plt.show()\n", "\n", "## I cut out f1 = f2\n", "plt.figure()\n", + "print('I cut out f1 = f2.')\n", "if MONOMER:\n", " plt.loglog(resultsdf.SNR_R1_f1, resultsdf['maxsyserr%_2D'], '.', alpha=.5)\n", " plt.xlabel('SNR_R1_f1')\n", @@ -4526,6 +5463,8 @@ " plt.xlabel('SNR_R2_f1') \n", "plt.ylabel('maxsyserr_2D (%)')\n", "#plt.ylim(ymin=0, ymax=maxsyserr_to_plot)\n", + "plt.tight_layout()\n", + "plt.show()\n", "\n", "plt.figure()\n", "if MONOMER:\n", @@ -4535,6 +5474,8 @@ " plt.loglog(resultsdf.minSNR_R2, resultsdf['avgsyserr%_2D'], '.', alpha=.5)\n", " plt.xlabel('minSNR_R2') \n", "plt.ylabel('avgsyserr%_2D')\n", + "plt.tight_layout()\n", + "plt.show()\n", "\n", "plt.figure()\n", "if MONOMER:\n", @@ -4544,6 +5485,8 @@ " plt.loglog(resultsdf.maxSNR_R2, resultsdf['avgsyserr%_2D'], '.', alpha=.5)\n", " plt.xlabel('maxSNR_R2') \n", "plt.ylabel('avgsyserr%_2D')\n", + "plt.tight_layout()\n", + "plt.show()\n", "\n", "plt.figure()\n", "if MONOMER:\n", @@ -4553,14 +5496,441 @@ " plt.loglog(resultsdf.meanSNR_R2, resultsdf['avgsyserr%_2D'], '.', alpha=.5)\n", " plt.xlabel('meanSNR_R2') \n", "plt.ylabel('avgsyserr%_2D')\n", + "plt.tight_layout()\n", + "plt.show()\n", + "\n", + "\n", + "plt.figure()\n", + "plt.plot(resultsdf.Freq1, resultsdf['log maxsyserr%_2D'] , '.', alpha=.5,)\n", + "#plt.ylim(ymin=0, ymax=maxsyserr_to_plot)\n", + "plt.xlabel('Freq1 (rad/s)')\n", + "plt.ylabel('log maxsyserr_2D (%)')\n", + "#plt.xticks([res1, res2]);\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + 
"source": [ + "reslist" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "resonatorsystem" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# *****\n", + "figsize = (figwidth/2, 1.3)\n", + "symb = '.'\n", + "lw = 1\n", + "datestr = datestring()\n", + "roundedres = [round(w,2) for w in reslist[:2]]\n", + "ticklist = [round(minfreq),round(maxfreq)] + roundedres\n", + "saving = False\n", + "do_3D = True\n", + "set_format()\n", + "saving = True\n", + "\n", + "SSgrid1D=resultsdfsweep2freqorigmean.pivot_table(\n", + " index = 'Freq1', columns = 'Freq2', values = 'log avgsyserr%_1D').sort_index(axis = 0, ascending = False)\n", + "SSgrid2D=resultsdfsweep2freqorigmean.pivot_table(\n", + " index = 'Freq1', columns = 'Freq2', values = 'log avgsyserr%_2D').sort_index(axis = 0, ascending = False)\n", + "if do_3D:\n", + " SSgrid3D=resultsdfsweep2freqorigmean.pivot_table(\n", + " index = 'Freq1', columns = 'Freq2', values = 'log avgsyserr%_3D').sort_index(axis = 0, ascending = False)\n", + " vmin = min(SSgrid1D.min().min(), SSgrid2D.min().min(),SSgrid3D.min().min()) # use same scale for all 3\n", + "else:\n", + " vmin = min(SSgrid1D.min().min(), SSgrid2D.min().min()) # use same scale for both\n", + "vmax = 2\n", + "print('vmin:', vmin, ', corresponding to ', 10**vmin, '%')\n", + "\n", + "\n", + "plt.figure(figsize = (1.555,1.3), dpi= 300 )\n", + "myheatmap(SSgrid1D, \"log average error\", vmin=vmin, vmax=vmax, cmap='magma_r'); \n", + "plt.title('1D-SVD')\n", + "plt.ylabel('$\\omega_a$ (rad/s)')\n", + "plt.xlabel('$\\omega_b$ (rad/s)')\n", + "if True: #resonatorsystem == 11 or resonatorsystem == 110:\n", + " #plt.xticks(ticklist)\n", + " #plt.yticks(ticklist)\n", + " #plt.xticks(range(round(maxfreq)+1))\n", + " plt.xticks([res1, res2])\n", + " plt.xticks([], minor = True)\n", + " plt.yticks(range(round(maxfreq)+1)) \n", + "plt.axis('equal')\n", + "plt.tight_layout()\n", + "if saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"1D2freqheatmap,\" + datestr\n", + " savefigure(savename)\n", + "plt.show()\n", + "\n", + "plt.figure(figsize = (1.555,1.3), dpi= 300 )\n", + "myheatmap(SSgrid2D, \"log average error\", vmin=vmin, vmax=vmax, cmap='magma_r'); \n", + "plt.title('2D-SVD')\n", + "plt.ylabel('$\\omega_a$ (rad/s)')\n", + "plt.xlabel('$\\omega_b$ (rad/s)')\n", + "if True: #resonatorsystem == 11 or resonatorsystem == 110:\n", + " #plt.xticks(ticklist)\n", + " #plt.yticks(ticklist)\n", + " #plt.xticks(range(round(maxfreq)+1))\n", + " plt.xticks([res1, res2])\n", + " plt.xticks([], minor = True)\n", + " plt.yticks(range(round(maxfreq)+1))\n", + "plt.axis('equal')\n", + "plt.tight_layout()\n", + "if saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"2D2freqheatmap,\" + datestr\n", + " savefigure(savename)\n", + "plt.show()\n", + "\n", + "if do_3D:\n", + " plt.figure(figsize = (1.555,1.3), dpi= 300 )\n", + " ax,cbar = myheatmap(SSgrid3D, \"log average error\",vmin=vmin, vmax=vmax, cmap='magma_r',return_cbar=True); \n", + " if resonatorsystem == 110:\n", + " cbarticks = [0,1,2]\n", + " cbarticklabels = ['$10^'+str(tick)+'$' for tick in cbarticks]\n", + " cbarticklabels[-1] = '>' + cbarticklabels[-1]\n", + " cbar.set_ticks(cbarticks, labels=cbarticklabels)\n", + " plt.title('3D-SVD')\n", + " plt.ylabel('$\\omega_a$ (rad/s)')\n", + " plt.xlabel('$\\omega_b$ (rad/s)')\n", + " if True: #resonatorsystem == 11 or resonatorsystem == 110:\n", + " 
#plt.xticks(ticklist)\n", + " #plt.yticks(ticklist)\n", + " #plt.xticks(range(round(maxfreq)+1))\n", + " plt.xticks([res1, res2])\n", + " plt.xticks([], minor = True)\n", + " plt.yticks(range(round(maxfreq)+1))\n", + " plt.axis('equal')\n", + " plt.tight_layout()\n", + " if saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"3D2freqheatmap,\" + datestr\n", + " savefigure(savename)\n", + " plt.show()\n", + " \n", + " ## 3D minus 2D\n", + " plt.figure(figsize = (1.555,1.3), dpi= 300 )\n", + " ax,cbar = myheatmap(SSgrid3D-SSgrid2D, \"log average error\",vmin=vmin, vmax=vmax, cmap='magma_r',return_cbar=True); \n", + " if resonatorsystem == 110:\n", + " cbarticks = [0,1,2]\n", + " cbarticklabels = ['$10^'+str(tick)+'$' for tick in cbarticks]\n", + " cbarticklabels[-1] = '>' + cbarticklabels[-1]\n", + " cbar.set_ticks(cbarticks, labels=cbarticklabels)\n", + " plt.title('3D-2D-SVD')\n", + " plt.ylabel('$\\omega_a$ (rad/s)')\n", + " plt.xlabel('$\\omega_b$ (rad/s)')\n", + " if not MONOMER: #resonatorsystem == 11 or resonatorsystem == 110:\n", + " #plt.xticks(ticklist)\n", + " #plt.yticks(ticklist)\n", + " #plt.xticks(range(round(maxfreq)+1))\n", + " plt.xticks([res1, res2])\n", + " plt.xticks([], minor = True)\n", + " plt.yticks(range(round(maxfreq)+1))\n", + " plt.axis('equal')\n", + " plt.tight_layout()\n", + " if saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"3D-2D-2freqheatmap,\" + datestr\n", + " savefigure(savename)\n", + " plt.show()\n", + "\n", + "if not MONOMER:\n", + " \n", + " \n", + " plt.figure(figsize = (1.555,1.3) )\n", + " grid=resultsdfsweep2freqorigmean.pivot_table(\n", + " index = 'R2_phase_noiseless1', columns = 'R1_phase_noiseless2', values = 'log avgsyserr%_1D').sort_index(axis = 0, ascending = False)\n", + " myheatmap(grid, \"log avgsyserr%_1D\", cmap = 'magma_r')#, vmax = 2); \n", + " if resonatorsystem == 11 or resonatorsystem == 110:\n", + " plt.xticks([0, -np.pi/2, -np.pi])\n", + " plt.yticks([0, -np.pi, -2*np.pi])\n", + " \n", + " #plt.axis('equal')\n", + " plt.tight_layout()\n", + " plt.show()\n", + " \n", + " plt.figure(figsize = (1.555,1.3) )\n", + " grid=resultsdfsweep2freqorigmean.pivot_table(\n", + " index = 'R2_phase_noiseless1', columns = 'R1_phase_noiseless2', values = 'Freq1').sort_index(axis = 0, ascending = False)\n", + " myheatmap(grid, \"Freq1\", cmap = 'magma_r')#, vmax = 2); \n", + " if resonatorsystem == 11 or resonatorsystem == 110:\n", + " plt.xticks([0, -np.pi/2, -np.pi])\n", + " plt.yticks([0, -np.pi, -2*np.pi])\n", + " \n", + " #plt.axis('equal')\n", + " plt.tight_layout()\n", + " plt.show()\n", + " \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "plt.figure(figsize = (3,3), dpi= 300 )\n", + "\"\"\"SSgrid1D=resultsdfsweep2freqorigmean.pivot_table(\n", + " index = 'smallest singular value', columns = 'second smallest singular value', values = 'log avgsyserr%_1D').sort_index(axis = 0, ascending = False)\n", + "myheatmap(SSgrid1D, \"log average error (%)\", cmap='rainbow'); \"\"\"\n", + "plt.scatter(x=resultsdfsweep2freqorigmean['smallest singular value'], \n", + " y=resultsdfsweep2freqorigmean['second smallest singular value'],\n", + " c=resultsdfsweep2freqorigmean['log avgsyserr%_1D'],\n", + " vmax = 1,\n", + " s=.5,\n", + " #alpha = .8,\n", + " marker = '.'\n", + " )\n", + "cbar = plt.colorbar()\n", + "cbar.outline.set_visible(False)\n", + "cbar.set_label('log error (%)')\n", + "plt.title('1D-SVD')\n", + "plt.xlabel('$\\lambda_1$')\n", + 
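Reviewer note: the scatter below colors the per-pair error by the smallest and second-smallest singular values of the system being solved, consistent with an SVD recovery degrading as the matrix approaches rank deficiency. A tiny reminder of how numpy exposes those singular values and the associated condition number (the matrix here is generic, not the notebook's):

```python
import numpy as np

rng = np.random.default_rng(4)
A = rng.normal(size=(6, 5))
A[:, 4] = A[:, 3] + 1e-6 * rng.normal(size=6)   # make two columns nearly identical

s = np.linalg.svd(A, compute_uv=False)          # singular values, largest first
print('singular values:', s)
print('smallest:', s[-1], ' second smallest:', s[-2])
print('condition number:', s[0] / s[-1])        # large => parameters poorly constrained
```

A near-zero smallest singular value means one combination of parameters is almost unconstrained by the chosen frequency pair, which is consistent with the error-vs-singular-value trend plotted here.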
"plt.ylabel('$\\lambda_2$')\n", + "if False: #resonatorsystem == 11 or resonatorsystem == 110:\n", + " #plt.xticks(ticklist)\n", + " #plt.yticks(ticklist)\n", + " #plt.xticks(range(round(maxfreq)+1))\n", + " plt.xticks([res1, res2])\n", + " plt.xticks([], minor = True)\n", + " plt.yticks(range(round(maxfreq)+1)) \n", + "#plt.axis('equal')\n", + "plt.tight_layout()\n", + "if saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"1D2freqheatmap,\" + datestr\n", + " savefigure(savename)\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "saving = False\n", + "\n", + "plt.figure(figsize = (2,1.3))\n", + "alpha = .01\n", + "ms = .3\n", + "resultsdfsweep2freqorigmean_resort_s = resultsdfsweep2freqorigmean.sort_values(by='second smallest singular value')\n", + "Xs = resultsdfsweep2freqorigmean_resort_s['second smallest singular value']\n", + "plt.plot(Xs, resultsdfsweep2freqorigmean_resort_s['avgsyserr%_3D'], symb, ms = ms, lw=lw, color = co3, alpha = alpha, label = '3D')\n", + "plt.plot(Xs, resultsdfsweep2freqorigmean_resort_s['avgsyserr%_2D'], symb, ms = ms, lw=lw, color = co2, alpha = alpha, label = '2D')\n", + "plt.plot(Xs, resultsdfsweep2freqorigmean_resort_s['avgsyserr%_1D'], symb, ms = ms, lw=lw, color = co1, alpha = alpha, label = '1D')\n", + "\"\"\"plt.plot(X, resultsdfsweep2freqorigmean_resort_s['log avgsyserr%_1D'] -resultsdfsweep2freqorigmean_resort_s['log avgsyserr%_2D']\n", + " , lw=lw, alpha = .5, color = 'k')\n", + "\"\"\"\n", + "plt.xlabel('Second smallest singular value')\n", + "plt.yscale('log')\n", + "if resonatorsystem == -3:\n", + " plt.yticks([1e4,1e2, 1e0, 1e-2])\n", + "#text_color_legend(ncol=3)\n", + "plt.ylabel('Avg err (%)');\n", + "plt.tight_layout()\n", + "if saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"2freq,err_vs_s,\" + datestr\n", + " savefigure(savename)\n", + " resultsdfsweep2freqorigmean_resort_s[['second smallest singular value',\n", + " 'log avgsyserr%_1D',\n", + " 'log avgsyserr%_2D', \n", + " 'log avgsyserr%_3D']].to_csv(savename + '.csv')\n", + "plt.show()\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "resultsdfmeanbyfreq1 = resultsdf.groupby(by=['Freq1'], as_index=False).mean(numeric_only =True)\n", + "X = resultsdfmeanbyfreq1['Freq1'] \n", + "\n", + "plt.figure(figsize=figsize)\n", + "#plt.plot(resultsdf.Freq1, resultsdf['avgsyserr%_3D'] , '.', alpha=.008, color = co3)\n", + "plt.plot(resultsdf.Freq1, resultsdf['avgsyserr%_1D'] , '.', alpha=.008, color = co1)\n", + "plt.plot(resultsdf.Freq1, resultsdf['avgsyserr%_2D'] , '.', alpha=.008, color = co2)\n", + "plt.plot(X, 10**resultsdfmeanbyfreq1['log avgsyserr%_2D'], color = co2, label='2D')\n", + "plt.plot(X, 10**resultsdfmeanbyfreq1['log avgsyserr%_1D'], color = co1,label='1D')\n", + "#plt.ylim(ymin=0, ymax=maxsyserr_to_plot)\n", + "plt.title('$\\omega_b = $' + str(round(resultsdfmean.Freq2.min(),1)) + ' to ' \n", + " + str(round(resultsdfmean.Freq2.max(),1)) + ' rad/s',\n", + " loc='right')\n", + "plt.xlabel('$\\omega_a$ (rad/s)')\n", + "plt.ylabel('avg err (%)')\n", + "plt.yscale('log')\n", + "#plt.xticks([res1, res2]);\n", + "plt.tight_layout()\n", + "plt.show()\n", + "\n", "\n", + "plt.figure(figsize=figsize)\n", + "\n", + "plt.axvline(reslist[0], color='gray', lw=0.5)\n", + "if not MONOMER:\n", + " plt.axvline(reslist[1], color='gray', lw=0.5)\n", + "\n", + "axa=plt.gca()\n", + "colors = [co1, co2, co3]\n", + "dimensions = ['1D', '2D', '3D']\n", + "if resonatorsystem == -3:\n", 
+    "    dim = ['1D', '2D']\n",
+    "else:\n",
+    "    dim = dimensions\n",
+    "\n",
+    "# initialize 95% confidence interval columns\n",
+    "for column in ['E_lower_1D', 'E_upper_1D','E_95range_1D','E_log95range_1D',\\\n",
+    "               'E_lower_2D', 'E_upper_2D', 'E_95range_2D', 'E_log95range_2D',\\\n",
+    "               'E_lower_3D', 'E_upper_3D', 'E_95range_3D','E_log95range_3D']:\n",
+    "    resultsdfmeanbyfreq1[column] = np.nan\n",
+    "    \n",
+    "for f1 in resultsdfmeanbyfreq1['Freq1']:\n",
+    "    for D in dimensions: # ASE stands for average systematic err\n",
+    "        #plt.hist(resultsvaryFreq2[resultsvaryFreq2['Freq2']== f1]['avgsyserr%_1D'])\n",
+    "        ASE = resultsdf[resultsdf['Freq1']== f1]['avgsyserr%_' + D]\n",
+    "        ASE = np.sort(ASE)\n",
+    "        halfalpha = (1 - .95)/2\n",
+    "        ## literally select the 95% confidence interval by tossing out the top 2.5% and the bottom 2.5% \n",
+    "        ## I could do a weighted average to work better with selecting the top 2.5% and bottom 2.5%\n",
+    "        ## But perhaps this is good enough for an estimate. It's ideal if I do 80 measurements.\n",
+    "        lowerbound = np.mean([ASE[int(np.floor(halfalpha*len(ASE)))], ASE[int(np.ceil(halfalpha*len(ASE)))]])\n",
+    "        #print(lowerbound)\n",
+    "        upperbound = np.mean([ASE[-int(np.floor(halfalpha*len(ASE))+1)],ASE[-int(np.ceil(halfalpha*len(ASE))+1)]])\n",
+    "        resultsdfmeanbyfreq1.loc[resultsdfmeanbyfreq1['Freq1']== f1,'E_95range_'+ D] = upperbound - lowerbound\n",
+    "        resultsdfmeanbyfreq1.loc[resultsdfmeanbyfreq1['Freq1']== f1,'E_log95range_'+ D] = np.log10(upperbound) - np.log10(lowerbound)\n",
+    "        resultsdfmeanbyfreq1.loc[resultsdfmeanbyfreq1['Freq1']== f1,'E_lower_'+ D] = lowerbound\n",
+    "        resultsdfmeanbyfreq1.loc[resultsdfmeanbyfreq1['Freq1']== f1,'E_upper_' + D] = upperbound\n",
+    "\n",
+    "\n",
+    "for i in range(len(dim)): \n",
+    "    Yhigh = resultsdfmeanbyfreq1['E_upper_' + dim[i]]\n",
+    "    Ylow = resultsdfmeanbyfreq1['E_lower_' + dim[i]] \n",
+    "    plt.plot(X, Yhigh, color = colors[i], alpha = .3, linewidth=.3)\n",
+    "    plt.plot(X, Ylow, color = colors[i], alpha = .3, linewidth=.3)\n",
+    "    axa.fill_between(X, Ylow, Yhigh, color = colors[i], alpha=.2)\n",
+    "#plt.plot(X, 10**resultsdfmeanbyfreq1['log avgsyserr%_3D'], color = co3, label='3D')\n",
+    "plt.plot(X, 10**resultsdfmeanbyfreq1['log avgsyserr%_2D'], color = co2, label='2D')\n",
+    "plt.plot(X, 10**resultsdfmeanbyfreq1['log avgsyserr%_1D'], color = co1,label='1D')\n",
+    "plt.yscale('log')\n",
+    "text_color_legend()\n",
+    "plt.title('$\omega_b = $' + str(round(resultsdfmean.Freq2.min(),1)) + ' to ' \n",
+    "          + str(round(resultsdfmean.Freq2.max(),1)) + ' rad/s',\n",
+    "          loc='right')\n",
+    "plt.xlabel('$\omega_a$ (rad/s)')\n",
+    "plt.ylabel('avg err (%)')\n",
+    "plt.tight_layout()\n",
+    "plt.show()\n",
+    "\n",
+    "plt.figure(figsize = figsize)\n",
+    "plt.axvline(reslist[0], color='gray', lw=0.5)\n",
+    "if not MONOMER:\n",
+    "    plt.axvline(reslist[1], color='gray', lw=0.5)\n",
+    "#plt.plot(X, 10**resultsdfmeanbyfreq1['log avgsyserr%_3D'], color = co3, label='3D')\n",
+    "plt.plot(X, 10**resultsdfmeanbyfreq1['log avgsyserr%_2D'], color = co2, label='2D')\n",
+    "plt.plot(X, 10**resultsdfmeanbyfreq1['log avgsyserr%_1D'], color = co1,label='1D')\n",
+    "text_color_legend()\n",
+    "#plt.yscale('log')\n",
+    "#plt.ylim(ymin=0)\n",
+    "plt.ylim(ymin=0, ymax=10)\n",
+    "plt.title('$\omega_b = $' + str(round(resultsdfmean.Freq2.min(),1)) + ' to ' \n",
+    "          + str(round(resultsdfmean.Freq2.max(),1)) + ' rad/s',\n",
+    "          loc='right')\n",
+    "plt.xlabel('$\omega_a$ (rad/s)')\n",
+    "plt.ylabel('avg err (%)');\n",
+    "plt.tight_layout()\n",
+    "if 
saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"2freqavgerr,\" + datestr\n", + " savefigure(savename)\n", + " resultsdfmeanbyfreq1[['Freq1','log avgsyserr%_1D','log avgsyserr%_2D', 'log avgsyserr%_3D']].to_csv(savename + '.csv')\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "if not MONOMER:\n", + " plt.figure(figsize = (1.8,1.3), dpi= 300 ) # *** new subfigure\n", + " grid=resultsdfsweep2freqorigmean.pivot_table(\n", + " index = 'R2_phase_noiseless1', columns = 'R2_phase_noiseless2', values = 'log avgsyserr%_1D').sort_index(axis = 0, ascending = False)\n", + " ax,cbar = myheatmap(grid, \"log avgsyserr%_1D\", vmax = 4, cmap = 'magma_r', return_cbar=True)#, vmax = 2); \n", + " cbarticks = [1,2,3,4]\n", + " cbarticklabels = ['$10^'+str(tick)+'$' for tick in cbarticks]\n", + " cbarticklabels[-1] = '>' + cbarticklabels[-1]\n", + " cbar.set_ticks(cbarticks, labels=cbarticklabels)\n", + " if resonatorsystem == 11 or resonatorsystem == 110:\n", + " plt.xlim(0, 2*np.pi)\n", + " plt.xticks([0, -np.pi, -2*np.pi], labels = ['0','$-\\pi$', '$-2\\pi$'])\n", + " plt.yticks([0, -np.pi, -2*np.pi], labels = ['0','$-\\pi$', '$-2\\pi$'])\n", + " plt.xlabel('$\\phi_{2,b}$')\n", + " plt.ylabel('$\\phi_{2,a}$')\n", + " \n", + " plt.axis('equal')\n", + " plt.tight_layout()\n", + " if True:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"1D_heatmap_by_phase,\" + datestr\n", + " savefigure(savename)\n", + " plt.show()\n", + " \n", + " \n", + " # *** Do I need to add '1D-SVD' title?" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plt.figure(figsize = figsize)\n", + "plt.axvline(reslist[0], color='gray', lw=0.5)\n", + "if not MONOMER:\n", + " plt.axvline(reslist[1], color='gray', lw=0.5)\n", + "#plt.plot(X, 10**resultsdfmeanbyfreq1['log avgsyserr%_3D'], color = co3, label='3D')\n", + "plt.plot(X, 10**resultsdfmeanbyfreq1['log avgsyserr%_2D'], color = co2, label='2D')\n", + "plt.plot(X, 10**resultsdfmeanbyfreq1['log avgsyserr%_1D'], color = co1,label='1D')\n", + "text_color_legend()\n", + "#plt.yscale('log')\n", + "plt.ylim(ymin=0, ymax=100)\n", + "plt.title('$\\omega_b = $' + str(round(resultsdfmean.Freq2.min(),1)) + ' to ' \n", + " + str(round(resultsdfmean.Freq2.max(),1)) + ' rad/s',\n", + " loc='right')\n", + "plt.xlabel('$\\omega_a$ (rad/s)')\n", + "plt.ylabel('avg err (%)');\n", + "plt.tight_layout()\n", + "if saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"2freqavgerr,\" + datestr\n", + " savefigure(savename)\n", + " resultsdfmeanbyfreq1[['Freq1','log avgsyserr%_1D','log avgsyserr%_2D', 'log avgsyserr%_3D']].to_csv(savename + '.csv')\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print('The average error varies over two orders of magnitude.')\n", + "plt.plot(resultsdfmeanbyfreq1.Freq1,resultsdfmeanbyfreq1.E_log95range_1D)\n", + "plt.plot(resultsdfmeanbyfreq1.Freq1,resultsdfmeanbyfreq1.E_log95range_2D)\n", "\n", - "plt.figure()\n", - "plt.plot(resultsdf.Freq1, resultsdf['log maxsyserr%_2D'] , '.', alpha=.5,)\n", - "#plt.ylim(ymin=0, ymax=maxsyserr_to_plot)\n", "plt.xlabel('Freq1 (rad/s)')\n", - "plt.ylabel('log maxsyserr_2D (%)')\n", - "#plt.xticks([res1, res2]);\n" + "plt.ylabel('Number of orders of magnitude');" ] }, { @@ -4578,9 +5948,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - 
}, + "metadata": {}, "outputs": [], "source": [ "\"\"\"print('The most likely frequency pair to be 1d nullspace:')\"\"\"\n", @@ -4594,12 +5962,12 @@ " bardisplaylabels = ['K1', 'K2', 'K12','B1','B2','FD','M1','M2','avg', 'rms']\n", "elemslist_2D = [el + '_2D' for el in elemslist]\n", "elemslist = [el+ '_1D' for el in elemslist]\n", - "llist1 = ['Freq1', 'Freq2', 'avgsyserr%_1D-avgsyserr%_2D', 'rmssyserr%_1D', 'rmssyserr%_2D'] + elemslist + elemslist_2D\n", + "llist1 = ['Freq1', 'Freq2', 'R1_phase_noiseless1', 'R1_phase_noiseless2', 'avgsyserr%_1D-avgsyserr%_2D', 'rmssyserr%_1D', 'rmssyserr%_2D'] + elemslist + elemslist_2D\n", "syserrlist = [w + 'syserr%' for w in bardisplaylabels]\n", "syserrlist_2D = [w + '_2D' for w in syserrlist]\n", "syserrlist = [w + '_1D' for w in syserrlist]\n", "\n", - "min_df = resultsdf.iloc[resultsdf['avgsyserr%_1D-avgsyserr%_2D'].argmin()] # most likely to be 1d nullspace\n", + "min_df = resultsdfsweep2freqorigmean.iloc[resultsdfsweep2freqorigmean['avgsyserr%_1D-avgsyserr%_2D'].argmin()] # most likely to be 1d nullspace\n", "display(min_df[llist1])\n", "#min_df[['M1_2D', 'M2_2D', 'B1_2D', 'B2_2D', 'K1_2D', 'K2_2D', 'K12_2D', 'FD_2D',]]\n", "\n", @@ -4611,16 +5979,16 @@ " ax.bar(X + 0.50, syserrdf[syserrlist_2D], color = 'r', width = 0.3)\n", " plt.title('syserrs: 1d blue, 2d red');\n", " \n", - "\"\"\"\n", + "\n", "grapherror_1D_2D(min_df, bardisplaylabels, syserrlist, syserrlist_2D )\n", "plt.show()\n", "\n", - "print('1D nullspace')\n", + "\"\"\"print('1D nullspace')\n", "plot_SVD_results(drive,R1_amp,R1_phase,R2_amp,R2_phase,convert_to_measurementdf(min_df), \n", " min_df.K1_1D, min_df.K2_1D, min_df.K12_1D, min_df.B1_1D, min_df.B2_1D, min_df.FD_1D, min_df.M1_1D, min_df.M2_1D,\n", " MONOMER=MONOMER, forceboth=forceboth,saving=savefig)\n", - "plt.show()\n", - "print('The above is likely to be quite a poor choice of frequencies, since it was selected for having poor 2d nullspace results')\"\"\";" + "plt.show()\"\"\"\n", + "print('The above is likely to be quite a poor choice of frequencies, since it was selected for having poor 2d nullspace results')" ] }, { @@ -4638,15 +6006,14 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ - "best_df = resultsdf.loc[resultsdf['avgsyserr%_1D'].argmin()] # most likely to be good\n", + "best_df = resultsdfsweep2freqorigmean.iloc[resultsdfsweep2freqorigmean['avgsyserr%_1D'].argmin()] # most likely to be good\n", "display(best_df[llist1])\n", - "print('Best 1d results (lowest average syserr):')\n", + "print('Best 1d results (lowest average err):')\n", "grapherror_1D_2D(best_df, bardisplaylabels, syserrlist, syserrlist_2D )\n", + "plt.ylabel('Err (%)')\n", "plt.show()\n", "\n", "'''print('1D nullspace, best choice of two frequencies')\n", @@ -4655,7 +6022,17 @@ " best_df.FD_2D, best_df.M1_2D, best_df.M2_2D,\n", " MONOMER=MONOMER, forceboth=forceboth,saving=savefig)\n", "plt.show()'''\n", - "print('1D nullspace, best choice of two frequencies')" + "print('1D nullspace, best choice of two frequencies')\n", + "\n", + "# -0.4 pi \n", + "# and\n", + "# -.12 pi\n", + "\n", + "# or\n", + "\n", + "# -0.74 pi\n", + "# and\n", + "# -0.454 pi (-5/11 pi)\n" ] }, { @@ -4664,7 +6041,7 @@ "metadata": {}, "outputs": [], "source": [ - "stophere# next sweep one frequency (called freq2)" + "stophere# next sweep one frequency (called freq2) (vary one freq) / sweep freq2 /sweep 1 freq" ] }, { @@ -4696,16 +6073,32 @@ "execution_count": null, "metadata": {}, 
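Side note on the .loc to .iloc change in the cells above: Series.argmin() returns a position, not an index label, so it has to be paired with .iloc; .loc only happens to work when the frame still has a default integer index. A minimal stand-alone sketch of the two equivalent idioms (toy data, not the notebook's results; only the column name 'avgsyserr%_1D' is taken from the notebook):

import pandas as pd

toy = pd.DataFrame({'Freq1': [3.1, 3.3, 3.5],
                    'avgsyserr%_1D': [12.0, 0.7, 4.2]},
                   index=[10, 20, 30])                       # non-default index, as after filtering or grouping

best_row = toy.iloc[toy['avgsyserr%_1D'].argmin()]           # argmin -> position -> .iloc
best_row_by_label = toy.loc[toy['avgsyserr%_1D'].idxmin()]   # idxmin -> label -> .loc
assert best_row.equals(best_row_by_label)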
"outputs": [], + "source": [ + "resonatorsystem" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], "source": [ "#Code that loops through frequency 2 points (of different spacing)\n", "\n", "verbose = True\n", - "repeats = 10\n", + "repeats = 80*20\n", "n = 200\n", "\n", "def sweep_freq2(freq1,drive=drive, vals_set = vals_set, \n", " noiselevel = noiselevel, MONOMER=MONOMER, freq3=None, repeats=repeats):\n", "\n", + " print('Running sweep_freq2 with', repeats, 'repeats: Sweeping freq2 with', len(drive), 'frequencies from', min(drive), 'to', max(drive), \n", + " 'while holding freq1 fixed at', freq1)\n", + " if freq3 is not None:\n", + " print('Holding freq3 fixed at', freq3)\n", + " \n", " [m1_set, m2_set, b1_set, b2_set, k1_set, k2_set, k12_set, F_set] = read_params(vals_set, MONOMER)\n", " \n", " R1_amp, R1_phase, R2_amp, R2_phase, R1_real_amp, R1_im_amp, R2_real_amp, R2_im_amp, privilegedrsqrd = \\\n", @@ -4760,9 +6153,21 @@ " plt.title('R2 complex amplitude')\n", "for ax in twirlax:\n", " ax.axis('equal');\n", - "\n", + " \n", + "if resonatorsystem == 11:\n", + " minfreq = 2.5\n", + " maxfreq = 4.5\n", + " includefreqs = reslist[1:]\n", + "else:\n", + " minfreq = None\n", + " maxfreq = None\n", + " includefreqs = reslist\n", + " \n", + "print('Choosing drive frequencies, which must include', includefreqs)\n", "## Choose driving frequencies\n", - "chosendrive, morefrequencies = create_drive_arrays(vals_set = vals_set, forceboth=forceboth, includefreqs = reslist,\n", + "chosendrive, morefrequencies = create_drive_arrays(vals_set = vals_set, forceboth=forceboth, \n", + " includefreqs = includefreqs,\n", + " minfreq = minfreq, maxfreq = maxfreq, staywithinlims = True,\n", " MONOMER = MONOMER, n=n, morefrequencies = morefrequencies)\n", "\n", "plt.figure()\n", @@ -4770,6 +6175,7 @@ " e=0, MONOMER=MONOMER, forceboth=forceboth), '.')\n", "plt.xlabel('Freq2')\n", "plt.ylabel('R1 phase')\n", + "plt.show()\n", "\n", "reset_ideal_freq3 = False\n", "if reset_ideal_freq3:\n", @@ -4822,7 +6228,6 @@ "### Run second to figure out ideal 3-frequency or Run once to figure out ideal 2-frequency\n", "# Ran 50 times in 21.149 sec\n", "before = time()\n", - "repeats = 5\n", "print('Running with fixed freqs: ' + str(res1) + ', ' + str(freq3))\n", "for i in range(1):\n", " thisres = sweep_freq2(freq1 = res1,drive=chosendrive, vals_set = vals_set, noiselevel = noiselevel, MONOMER=MONOMER, \n", @@ -4867,13 +6272,22 @@ "if freq3 is not None:\n", " freq_label.append(freq3)\n", "#freq_label = np.unique(np.array(freq_label))\n", - "\n", + "\"\"\"\n", "plotcomplex(complexZ = results_sweep_1freq.R1_amp_meas2, parameter = results_sweep_1freq.Freq2, ax = ax5, \n", " label_markers = freq_label)\n", "if not MONOMER:\n", " plotcomplex(complexZ = results_sweep_1freq.R2_amp_meas2, parameter = results_sweep_1freq.Freq2, ax = ax6)\n", " #ax6.plot(np.real(results_sweep_1freq.R2AmpCom2), np.imag(results_sweep_1freq.R2AmpCom2), '.')\n", - " " + " \"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "_, d1, _, d2, _, _, _, _, _ = calculate_spectra(np.array(res1), vals_set, noiselevel, MONOMER, forceboth)" ] }, { @@ -4885,9 +6299,65 @@ "# some thoughts about alpha transparency\n", "\"\"\"1000 -> .1\n", "5400 -> .03\"\"\";\n", + "repeats = len(results_sweep_1freq)\n", + "repeats" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + 
"repeats % 80 # want this to be 0" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "repeats / 80" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "len(results_sweep_1freq.Freq2.unique())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "len(results_sweep_1freq)" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "len(results_sweep_1freq) / len(results_sweep_1freq.Freq2.unique())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "results_sweep_1freq, results_sweep_1freqmean = \\\n", + " calc_error_interval(results_sweep_1freq, results_sweep_1freqmean, groupby='Freq2', fractionofdata = .95)\n" + ] + }, { "cell_type": "code", "execution_count": null, @@ -4900,23 +6370,51 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, "outputs": [], "source": [ "from matplotlib.ticker import AutoLocator\n", "widths=.03 # widths of boxplots\n", - "figsize=(15, 5)\n", + "if True:\n", + " lw=1\n", + " figsize=(15, 5)\n", + " if repeats > 100:\n", + " alpha=.03\n", + " elif repeats > 50:\n", + " alpha=.1\n", + " elif repeats > 25:\n", + " alpha=.3\n", + " else:\n", + " alpha=.5\n", + " ms = 10\n", + "else:\n", + " lw = 1\n", + " ms = 2\n", + " figsize = (3.2, 1.3)\n", + " if repeats > 20000:\n", + " alpha = .003\n", + " elif repeats > 100:\n", + " alpha=.03\n", + " elif repeats > 50:\n", + " alpha=.1\n", + " elif repeats > 25:\n", + " alpha=.3\n", + " else:\n", + " alpha=.5\n", "symb='.'\n", - "if repeats > 100:\n", - " alpha=.03\n", - "elif repeats > 50:\n", - " alpha=.1\n", - "elif repeats > 25:\n", - " alpha=.3\n", + "\n", + "if MONOMER:\n", + " Rnote = ''\n", "else:\n", - " alpha=.5\n", + " Rnote = ' at R1'\n", + "\n", "\n", "results_sweep_1freq.boxplot(column='log avgsyserr%_2D', by='Freq2', grid=False, #fontsize=7, rot=90, \n", " positions=results_sweep_1freq.Freq2.unique(), widths=widths, \n", @@ -4930,7 +6428,10 @@ "plt.gca().xaxis.set_major_locator(AutoLocator()) \n", "plt.show()\n", "\n", - "results_sweep_1freq.boxplot(column='log avgsyserr%_1D', by='Freq2', grid=False, #fontsize=7, rot=90, \n", + "results_sweep_1freq.boxplot(column='log avgsyserr%_1D', \n", + " by='Freq2',\n", + " #by = 'R1Phase2_wrap',\n", + " grid=False, #fontsize=7, rot=90, \n", " positions=results_sweep_1freq.Freq2.unique(), widths=widths, \n", " #color='k', \n", " showmeans=True, \n", @@ -4941,40 +6442,117 @@ "plt.title('');\n", "\n", "plt.figure(figsize=figsize) # remove this to overplot the boxplots\n", - "plt.plot(results_sweep_1freq.Freq2, results_sweep_1freq['log avgsyserr%_2D'], '.', color=co2, alpha=alpha )\n", - "plt.plot(results_sweep_1freq.Freq2, results_sweep_1freq['log avgsyserr%_1D'], '.', color=co1, alpha=alpha)\n", - "plt.plot(results_sweep_1freqmean.Freq2, results_sweep_1freqmean['log avgsyserr%_2D'], color=co2, label='2D' )\n", - "plt.plot(results_sweep_1freqmean.Freq2, results_sweep_1freqmean['log avgsyserr%_1D'], color=co1, label='1D')\n", + "plt.axvline(res1, color='grey')\n", + "plt.plot(results_sweep_1freq.Freq2, results_sweep_1freq['avgsyserr%_2D'], '.', \n", + " ms = ms, color=co2, alpha=alpha )\n", + 
"plt.plot(results_sweep_1freq.Freq2, results_sweep_1freq['avgsyserr%_1D'], '.', \n", + " ms = ms, color=co1, alpha=alpha)\n", + "plt.plot(results_sweep_1freqmean.Freq2, 10**results_sweep_1freqmean['log avgsyserr%_2D'], color=co2, lw=lw, label='2D' )\n", + "plt.plot(results_sweep_1freqmean.Freq2, 10**results_sweep_1freqmean['log avgsyserr%_1D'], color=co1, lw=lw, label='1D')\n", "plt.xlabel('Freq2')\n", - "plt.legend()\n", - "plt.ylabel('log avgsyserr%_1D');\n", + "text_color_legend()\n", + "plt.ylabel('Avg err (%)');\n", + "plt.yscale('log')\n", + "plt.show()\n", + "\n", + "beep()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "figsize = (8,4)\n", + "\n", + "if MONOMER:\n", + " Rnote = ''\n", + " x_axis_phase = 'R1Phase2_wrap'\n", + "else:\n", + " x_axis_phase = 'R2Phase2_wrap'\n", + " Rnote = ' at R2'\n", + "\n", + "\n", + "plt.figure(figsize=figsize) \n", + "# calculations\n", + "results_sweep_1freq[x_axis_phase]=results_sweep_1freq.R1_phase_noiseless2%(2*np.pi) - 2*np.pi\n", + "results_sweep_1freq_resort1=results_sweep_1freq.sort_values(by=x_axis_phase)\n", + "results_sweep_1freq_resort1mean=results_sweep_1freq_resort1.groupby(by=['Freq2'], as_index=False).mean()\n", + "\n", + "results_sweep_1freq, results_sweep_1freq_resort1mean = \\\n", + " calc_error_interval(results_sweep_1freq, results_sweep_1freq_resort1mean, groupby='Freq2', fractionofdata = .95)\n", + "\n", + "# plotting\n", + "plt.figure(figsize=figsize) \n", + "try:\n", + " plt.axvline(d1/np.pi, color='grey')\n", + "except NameError:\n", + " print('Calculating phase is broken')\n", + "plt.plot(results_sweep_1freq_resort1[x_axis_phase]/np.pi, results_sweep_1freq_resort1['avgsyserr%_3D'], \n", + " '.', ms = ms,color=co3, alpha=alpha )\n", + "plt.plot(results_sweep_1freq_resort1[x_axis_phase]/np.pi, results_sweep_1freq_resort1['avgsyserr%_2D'], \n", + " '.', ms = ms,color=co2, alpha=alpha )\n", + "plt.plot(results_sweep_1freq_resort1[x_axis_phase]/np.pi, results_sweep_1freq_resort1['avgsyserr%_1D'], \n", + " '.', ms = ms,color=co1, alpha=alpha)\n", + "\n", + "plt.plot(results_sweep_1freq_resort1mean[x_axis_phase]/np.pi, 10**results_sweep_1freq_resort1mean['log avgsyserr%_3D'], \n", + " lw=lw,color=co3, label='3D' )\n", + "plt.plot(results_sweep_1freq_resort1mean[x_axis_phase]/np.pi, 10**results_sweep_1freq_resort1mean['log avgsyserr%_2D'], \n", + " lw=lw,color=co2, label='2D' )\n", + "plt.plot(results_sweep_1freq_resort1mean[x_axis_phase]/np.pi, 10**results_sweep_1freq_resort1mean['log avgsyserr%_1D'], \n", + " lw=lw,color=co1, label='1D')\n", + "\n", + "#plt.xlim(xmin=-np.pi, xmax=np.pi)\n", + "plt.xlabel('Phase of Freq2'+ Rnote+' ($\\pi$)')\n", + "plt.xticks([-1,-3/4, -1/2, -1/4, 0])\n", + "text_color_legend()\n", + "plt.ylabel('avgsyserr (%)');\n", + "plt.yscale('log')\n", "plt.show()\n", "\n", + "# Export figure\n", "plt.figure(figsize=figsize) \n", - "results_sweep_1freq['R1Phase2_wrap']=results_sweep_1freq.R1_phase_noiseless2%(2*np.pi) - 2*np.pi\n", - "results_sweep_1freq_resort1=results_sweep_1freq.sort_values(by='R1Phase2_wrap')\n", - "results_sweep_1freq_resort1mean=results_sweep_1freq_resort1.groupby(by=['Freq2']).mean()\n", - "\n", - "plt.plot(results_sweep_1freq_resort1.R1Phase2_wrap/np.pi, results_sweep_1freq_resort1['log avgsyserr%_3D'], \n", - " '.', color=co3, alpha=alpha )\n", - "plt.plot(results_sweep_1freq_resort1.R1Phase2_wrap/np.pi, results_sweep_1freq_resort1['log avgsyserr%_2D'], \n", - " '.', color=co2, alpha=alpha )\n", - 
"plt.plot(results_sweep_1freq_resort1.R1Phase2_wrap/np.pi, results_sweep_1freq_resort1['log avgsyserr%_1D'], \n", - " '.', color=co1, alpha=alpha)\n", - "\n", - "plt.plot(results_sweep_1freq_resort1mean.R1Phase2_wrap/np.pi, results_sweep_1freq_resort1mean['log avgsyserr%_3D'], \n", - " color=co3, label='3D' )\n", - "plt.plot(results_sweep_1freq_resort1mean.R1Phase2_wrap/np.pi, results_sweep_1freq_resort1mean['log avgsyserr%_2D'], \n", - " color=co2, label='2D' )\n", - "plt.plot(results_sweep_1freq_resort1mean.R1Phase2_wrap/np.pi, results_sweep_1freq_resort1mean['log avgsyserr%_1D'], \n", - " color=co1, label='1D')\n", + "axa = plt.gca()\n", + "plt.figure(figsize=figsize) \n", + "try:\n", + " plt.axvline(d1/np.pi, color='grey')\n", + "except NameError:\n", + " print('Calculating phase is broken')\n", + "\n", + "dimensions = ['3D', '2D', '1D']\n", + "colors = [co3, co2, co1]\n", + "X = results_sweep_1freq_resort1mean[x_axis_phase]/np.pi \n", + "for i in range(3):\n", + " Yhigh = results_sweep_1freq_resort1mean['E_upper_' + dimensions[i]]\n", + " Ylow = results_sweep_1freq_resort1mean['E_lower_' + dimensions[i]] \n", + " plt.plot(X, Yhigh, color = colors[i], alpha = .3, linewidth=.3)\n", + " plt.plot(X, Ylow, color = colors[i], alpha = .3, linewidth=.3)\n", + " axa.fill_between(X, Ylow, Yhigh, color = colors[i], alpha=.2)\n", + "\n", + "plt.plot(results_sweep_1freq_resort1mean[x_axis_phase]/np.pi, 10**results_sweep_1freq_resort1mean['log avgsyserr%_3D'], \n", + " lw=lw,color=co3, label='3D' )\n", + "plt.plot(results_sweep_1freq_resort1mean[x_axis_phase]/np.pi, 10**results_sweep_1freq_resort1mean['log avgsyserr%_2D'], \n", + " lw=lw,color=co2, label='2D' )\n", + "plt.plot(results_sweep_1freq_resort1mean[x_axis_phase]/np.pi, 10**results_sweep_1freq_resort1mean['log avgsyserr%_1D'], \n", + " lw=lw,color=co1, label='1D')\n", "\n", "#plt.xlim(xmin=-np.pi, xmax=np.pi)\n", - "plt.xlabel('Phase of Freq2 at R1 ($\\pi$)')\n", - "plt.legend()\n", - "plt.ylabel('log avgsyserr (%)');\n", + "plt.xlabel('Phase of Freq2'+ Rnote+' ($\\pi$)')\n", + "plt.xticks([-1,-3/4, -1/2, -1/4, 0])\n", + "text_color_legend()\n", + "plt.ylabel('avgsyserr (%)');\n", + "plt.yscale('log')\n", + "plt.tight_layout()\n", + "if saving:\n", + " datestr = datestring()\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"sweepfreq2,\" + datestr\n", + " savefigure(savename)\n", + " results_sweep_1freq_resort1mean[[x_axis_phase,'log avgsyserr%_1D','log avgsyserr%_2D','log avgsyserr%_3D']].to_csv(\n", + " savename + '.csv')\n", "plt.show()\n", "\n", + "\n", "\"\"\"results_sweep_1freq_resort=results_sweep_1freq.sort_values(by='R1_phase_noiseless2')\n", "results_sweep_1freq_resort.boxplot(column='log avgsyserr%_1D', by='R1_phase_noiseless2', grid=False, fontsize=7, rot=90, \n", " positions=results_sweep_1freq_resort.R1_phase_noiseless2.unique(), \n", @@ -4986,41 +6564,41 @@ "plt.title('');\"\"\"\n", "\n", "fig, ax=plt.subplots(subplot_kw={'projection': 'polar'})\n", - "ax.plot(results_sweep_1freq_resort1.R1Phase2_wrap, results_sweep_1freq_resort1['log avgsyserr%_1D'], '.', color=co1, alpha=alpha)\n", - "ax.plot(results_sweep_1freq_resort1mean.R1Phase2_wrap, results_sweep_1freq_resort1mean['log avgsyserr%_1D'], color=co1 )\n", - "\n", + "ax.plot(results_sweep_1freq_resort1[x_axis_phase], results_sweep_1freq_resort1['log avgsyserr%_1D'], '.', color=co1, alpha=alpha)\n", + "ax.plot(results_sweep_1freq_resort1mean[x_axis_phase], results_sweep_1freq_resort1mean['log avgsyserr%_1D'], color=co1 )\n", + "plt.title('Log Avg Err 1D (%)')\n", 
"plt.show()\n", "\n", "\n", "plt.figure(figsize=figsize)\n", - "plt.plot(results_sweep_1freq.arclength_R1, results_sweep_1freq['log avgsyserr%_1D'], '.', color=co1, alpha=alpha)\n", - "plt.plot(results_sweep_1freq.arclength_R1, results_sweep_1freq['log avgsyserr%_2D'], '.', color=co2, alpha=alpha )\n", - "plt.plot(results_sweep_1freqmean.arclength_R1, results_sweep_1freqmean['log avgsyserr%_1D'], color=co1, label='1D')\n", - "plt.plot(results_sweep_1freqmean.arclength_R1, results_sweep_1freqmean['log avgsyserr%_2D'], color=co2, label='2D' )\n", + "plt.plot(results_sweep_1freq.arclength_R1, results_sweep_1freq['log avgsyserr%_1D'], '.', ms = ms,color=co1, alpha=alpha)\n", + "plt.plot(results_sweep_1freq.arclength_R1, results_sweep_1freq['log avgsyserr%_2D'], '.', ms = ms,color=co2, alpha=alpha )\n", + "plt.plot(results_sweep_1freqmean.arclength_R1, results_sweep_1freqmean['log avgsyserr%_1D'], lw=lw,color=co1, label='1D')\n", + "plt.plot(results_sweep_1freqmean.arclength_R1, results_sweep_1freqmean['log avgsyserr%_2D'], lw=lw, color=co2, label='2D' )\n", "plt.xlabel('arclength_R1')\n", - "plt.legend()\n", + "text_color_legend()\n", "plt.ylabel('log avgsyserr%_1D');\n", "plt.show()\n", "\n", "plt.figure(figsize=figsize)\n", - "plt.plot(np.degrees(results_sweep_1freq.modifiedangle_R1), results_sweep_1freq['log avgsyserr%_1D'], '.', color=co1, alpha=alpha)\n", - "plt.plot(np.degrees(results_sweep_1freq.modifiedangle_R1), results_sweep_1freq['log avgsyserr%_2D'], '.', color=co2, alpha=alpha )\n", - "plt.plot(np.degrees(results_sweep_1freqmean.modifiedangle_R1), results_sweep_1freqmean['log avgsyserr%_1D'], color=co1, label='1D')\n", - "plt.plot(np.degrees(results_sweep_1freqmean.modifiedangle_R1), results_sweep_1freqmean['log avgsyserr%_2D'], color=co2, label='2D')\n", + "plt.plot(np.degrees(results_sweep_1freq.modifiedangle_R1), results_sweep_1freq['log avgsyserr%_1D'], '.', ms = ms,color=co1, alpha=alpha)\n", + "plt.plot(np.degrees(results_sweep_1freq.modifiedangle_R1), results_sweep_1freq['log avgsyserr%_2D'], '.', ms = ms,color=co2, alpha=alpha )\n", + "plt.plot(np.degrees(results_sweep_1freqmean.modifiedangle_R1), results_sweep_1freqmean['log avgsyserr%_1D'], lw=lw,color=co1, label='1D')\n", + "plt.plot(np.degrees(results_sweep_1freqmean.modifiedangle_R1), results_sweep_1freqmean['log avgsyserr%_2D'], lw=lw,color=co2, label='2D')\n", "#plt.xlim(xmin=-np.pi, xmax=np.pi)\n", - "plt.xlabel('Twirly angle (deg) of Freq2 at R1')\n", - "plt.legend()\n", + "plt.xlabel('Twirly angle (deg) of Freq2'+ Rnote)\n", + "text_color_legend()\n", "plt.ylabel('log avgsyserr%_1D');\n", "plt.show()\n", "\n", "## Benjamin likes this one\n", "fig, ax=plt.subplots(subplot_kw={'projection': 'polar'})\n", - "ax.plot((results_sweep_1freq.modifiedangle_R1), results_sweep_1freq['log avgsyserr%_3D'], '.', color=co3, alpha=alpha/5)\n", - "ax.plot((results_sweep_1freq.modifiedangle_R1), results_sweep_1freq['log avgsyserr%_2D'], '.', color=co2, alpha=alpha/5)\n", - "ax.plot((results_sweep_1freq.modifiedangle_R1), results_sweep_1freq['log avgsyserr%_1D'], '.', color=co1, alpha=alpha/5)\n", - "ax.plot(results_sweep_1freqmean.modifiedangle_R1, results_sweep_1freqmean['log avgsyserr%_3D'],color=co3,)\n", - "ax.plot(results_sweep_1freqmean.modifiedangle_R1, results_sweep_1freqmean['log avgsyserr%_2D'],color=co2,)\n", - "ax.plot(results_sweep_1freqmean.modifiedangle_R1, results_sweep_1freqmean['log avgsyserr%_1D'],color=co1,)\n", + "ax.plot((results_sweep_1freq.modifiedangle_R1), results_sweep_1freq['log avgsyserr%_3D'], '.', ms 
= ms,color=co3, alpha=alpha/5)\n", + "ax.plot((results_sweep_1freq.modifiedangle_R1), results_sweep_1freq['log avgsyserr%_2D'], '.', ms = ms,color=co2, alpha=alpha/5)\n", + "ax.plot((results_sweep_1freq.modifiedangle_R1), results_sweep_1freq['log avgsyserr%_1D'], '.', ms = ms,color=co1, alpha=alpha/5)\n", + "ax.plot(results_sweep_1freqmean.modifiedangle_R1, results_sweep_1freqmean['log avgsyserr%_3D'],lw=lw,color=co3,)\n", + "ax.plot(results_sweep_1freqmean.modifiedangle_R1, results_sweep_1freqmean['log avgsyserr%_2D'],lw=lw,color=co2,)\n", + "ax.plot(results_sweep_1freqmean.modifiedangle_R1, results_sweep_1freqmean['log avgsyserr%_1D'],lw=lw,color=co1,)\n", "ax.set_theta_zero_location(\"S\") #south\n", "plt.show()\n", "\n", @@ -5036,7 +6614,7 @@ " label='2D', fontsize=7, rot=90 )\n", "plt.xticks([minfreq, maxfreq] + [round(w,3) for w in reslist])\n", "plt.xlabel('Freq2');\n", - "#plt.legend()\n", + "#text_color_legend()\n", "#plt.ylabel('log avgsyserr%_1D');\n", "\"\"\";\n", "\n", @@ -5049,9 +6627,80 @@ "metadata": {}, "outputs": [], "source": [ - "def datestring():\n", - " return datetime.today().strftime('%Y-%m-%d %H;%M;%S')\n", + "res1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# saved:\n", + "# G:\\Shared drives\\Horowitz Lab Notes\\Horowitz, Viva - notes and files\\simulation_export\\2023-02-06 23;30;37results_sweep_1freq.csv\n", + "# smaller file saved:\n", + "# sys11,2023-02-06 23;30;37results_sweep_1freq_limitedcolumns.csv\n", + "\n", + "## for publication figure\n", + "\n", + "results_sweep_1freq, results_sweep_1freqmean = \\\n", + " calc_error_interval(results_sweep_1freq, results_sweep_1freqmean, groupby='Freq2', fractionofdata = .95)\n", + "\n", + "figsize = (2.3, .9)\n", + "\n", + "plt.figure(figsize=figsize, dpi = 600) # *** for dimer figure, in progress\n", + "ax = plt.gca()\n", + "lw = 1 # heavier line for the mean\n", + "plt.axvline(res1, color='grey', lw = 0.5)\n", + "#plt.plot(results_sweep_1freq.Freq2, results_sweep_1freq['avgsyserr%_2D'], '.', \n", + "# ms = ms, color=co2, alpha=alpha )\n", + "#plt.plot(results_sweep_1freq.Freq2, results_sweep_1freq['avgsyserr%_1D'], '.', \n", + "# ms = ms, color=co1, alpha=alpha)\n", + "\n", + "dimensions = [ '2D', '1D']\n", + "colors = [ co2, co1]\n", + "X = results_sweep_1freqmean.Freq2 \n", + "for i in range(len(dimensions)):\n", + " Yhigh = results_sweep_1freq_resort1mean['E_upper_' + dimensions[i]]\n", + " Ylow = results_sweep_1freq_resort1mean['E_lower_' + dimensions[i]] \n", + " plt.plot(X, Yhigh, color = colors[i], alpha = .8, linewidth=.5) # thinner line for the extremes\n", + " plt.plot(X, Ylow, color = colors[i], alpha = .8, linewidth=.5)\n", + " ax.fill_between(X, Ylow, Yhigh, color = colors[i], alpha=.2)\n", + " \n", + "plt.plot(results_sweep_1freqmean.Freq2, 10**results_sweep_1freqmean['log avgsyserr%_2D'], color=co2, lw=lw, label='2D' )\n", + "plt.plot(results_sweep_1freqmean.Freq2, 10**results_sweep_1freqmean['log avgsyserr%_1D'], color=co1, lw=lw, label='1D')\n", + "plt.xlabel('Freq2')\n", + "#W = approx_width(k2_set, m2_set, b2_set)\n", + "#plt.xlim(xmin = res2-1, xmax = res2+1) #****\n", + "plt.xlim(2.5,4.5)\n", + "plt.ylim(6e-2, 3e3)\n", + "#text_color_legend()\n", + "plt.ylabel('Avg err (%)');\n", + "plt.yscale('log')\n", + "plt.yticks([10**-1,10**0, 10**1, 10**2, 10**3])\n", + "plt.xlabel('$\\omega_b$ (rad/s)')\n", + "\n", "datestr = datestring()\n", + "results_sweep_1freqmean[['Freq1','Freq2','log avgsyserr%_1D', 'log avgsyserr%_2D', 'log 
avgsyserr%_3D', \n", + " 'E_lower_1D', 'E_upper_1D' ,\n", + " 'E_lower_2D', 'E_upper_2D',\n", + " 'E_lower_3D', 'E_upper_3D']].to_csv(os.path.join(savefolder,\n", + " 'sys' + str(resonatorsystem) + ',' + datestr + \"results_sweep_1freq_limitedcolumns.csv\"));\n", + "\n", + "if saving:\n", + " savename = \"sys\" + str(resonatorsystem) + ','+ \"sweep1freq,\" + datestr\n", + " savefigure(savename)\n", + "plt.show()\n", + "\n", + "beep()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "results_sweep_1freq.to_csv(os.path.join(savefolder,\n", " datestr + \"results_sweep_1freq.csv\"));\n", "results_sweep_1freq.to_pickle(os.path.join(savefolder,\n", @@ -5087,7 +6736,7 @@ "metadata": {}, "outputs": [], "source": [ - "# varying 2 param / vary2param / vary 2param vary\n", + "# varying 2 param / vary2param / vary 2param vary / vary two params\n", "\n", "def vary2param(paramname1 = 'm2', param_list1 = np.linspace(0.1, 60, num = 100),\n", " paramname2 = 'F',param_list2 = np.linspace(0.1, 60, num = 100),\n", @@ -5263,9 +6912,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "symb = '.'\n", @@ -5477,7 +7124,7 @@ " print('Saved: ' + os.path.join(savefolder,\n", " datestr + name + '.csv'))\n", "else:\n", - " resultsdoedf=pd.read_pickle(r'G:\\Shared drives\\Horowitz Lab Notes\\Horowitz, Viva - notes and files\\2022-07-23 01;43;31resultsdoe, movepeaks.pkl')" + " resultsdoedf=pd.read_pickle(r'2022-07-23 01;43;31resultsdoe, movepeaks.pkl')" ] }, { @@ -5822,9 +7469,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "with pd.option_context('display.max_rows', None,):\n", @@ -6194,7 +7839,6 @@ "outputs": [], "source": [ "fig, (ax1, ax2) = plt.subplots(1,2, figsize= (10,5), sharex = 'all', sharey = 'all')\n", - "#***\n", "plt.sca(ax1)\n", "dim = '1D'\n", "cc = 'log meanSNR_R1'\n", @@ -6287,9 +7931,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "\"\"\" DOE experiment: vary the noiselevel, driving force, and number of frequencies.\n", @@ -6388,7 +8030,11 @@ " frequencycolumn = 'drive', complexamplitude1 = 'R1AmpCom', \n", " complexamplitude2 = 'R2AmpCom',\n", " MONOMER=MONOMER, forceboth=forceboth, dtype =complex)\n", - " u, s, vh = np.linalg.svd(Zmatrix, full_matrices = True)\n", + " try:\n", + " u, s, vh = np.linalg.svd(Zmatrix, full_matrices = True)\n", + " except LinAlgError:\n", + " print('Could not solve')\n", + " continue\n", " vh = make_real_iff_real(vh)\n", "\n", " ## 1D NULLSPACE\n", @@ -6498,9 +8144,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "#display(resultsdoethreedf.transpose())\n", @@ -6573,9 +8217,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "doe5 = pyDOE2.fullfact([2,2,2,2,2]) \n", @@ -7140,9 +8782,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "# clearchoice is so named because it's the df of those experiments for which you better know 1D or 2D.\n", @@ -7158,9 +8798,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], 
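The error-band figures and the CSV export above all follow one recipe: group the noisy repeats by drive frequency, take the mean, estimate a 95% interval per group by cutting 2.5% off each tail (what calc_error_interval in helperfunctions.py does further down in this diff), and shade between the bounds on a log axis. A self-contained sketch of that recipe with made-up data; np.percentile stands in here for the notebook's explicit sort-and-trim and gives essentially the same bounds at 80 repeats per frequency:

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
freqs = np.repeat(np.linspace(2.5, 4.5, 21), 80)             # 80 noisy repeats per drive frequency
errs = 10 ** rng.normal(loc=1 + np.sin(freqs), scale=0.5)     # synthetic stand-in for 'avgsyserr%_1D'
results = pd.DataFrame({'Freq2': freqs, 'avgsyserr%_1D': errs})

def interval(group, frac=0.95):
    # central 95% interval of one group of repeats
    lo, hi = np.percentile(group, [(1 - frac) / 2 * 100, (1 + frac) / 2 * 100])
    return pd.Series({'mean': group.mean(), 'E_lower': lo, 'E_upper': hi})

band = results.groupby('Freq2')['avgsyserr%_1D'].apply(interval).unstack()

plt.fill_between(band.index, band['E_lower'], band['E_upper'], alpha=0.2)
plt.plot(band.index, band['mean'])
plt.yscale('log')
plt.xlabel('$\\omega_b$ (rad/s)')
plt.ylabel('Avg err (%)')
plt.show()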
"source": [ "#display(resultsdoechooseSNR.transpose())\n", @@ -7234,9 +8872,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "scrolled": false - }, + "metadata": {}, "outputs": [], "source": [ "corr = resultsdoechooseSNR[llist3].corr()\n", @@ -7246,13 +8882,6 @@ "sns.heatmap(corr, vmax = 1, vmin=-1, cmap ='PiYG'); # correlation can run from -1 to 1." ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, { "cell_type": "code", "execution_count": null, @@ -7472,7 +9101,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.12" + "version": "3.10.14" } }, "nbformat": 4, diff --git a/SVD algebraic approach.nb b/NetMAP 4-mass.nb similarity index 100% rename from SVD algebraic approach.nb rename to NetMAP 4-mass.nb diff --git a/resonatorSVDanalysis.py b/NetMAP.py similarity index 83% rename from resonatorSVDanalysis.py rename to NetMAP.py index dbafb82..e8e32cb 100644 --- a/resonatorSVDanalysis.py +++ b/NetMAP.py @@ -2,25 +2,34 @@ """ Created on Tue Aug 9 16:50:38 2022 +NetMAP: Create script-Z matrix and find its kernel, or null-space. + @author: vhorowit """ - import numpy as np import math from resonatorphysics import res_freq_weak_coupling from helperfunctions import read_params from resonatorfrequencypicker import res_freq_numeric -"""Zmatrix2resonators(df) will return a matrix for svd for any number of frequency measurements, +"""Zmatrix2resonators(df,...) will return a matrix for svd for any number of frequency measurements, listed in each row of the dataframe measurementdf If forceboth is true then both masses receive a force. -parameternames = ['m1', 'm2', 'b1', 'b2', 'k1', 'k2','c12', 'Driving Force'] +parameternames = ['m1', 'm2', 'b1', 'b2', 'k1', 'k2','k12', 'Driving Force'] """ + def Zmatrix2resonators(measurementdf, forceboth, frequencycolumn = 'drive', complexamplitude1 = 'R1AmpCom', complexamplitude2 = 'R2AmpCom', dtype=complex): - Zmatrix = [] + + ## Are both masses being pushed? or just the first? + if forceboth: + ff = -1 + else: + ff = 0 + + Zmatrix = [] # this would likely be more efficient as a numpy array. for rowindex in measurementdf.index: w = measurementdf[frequencycolumn][rowindex] #print(w) @@ -29,15 +38,12 @@ def Zmatrix2resonators(measurementdf, forceboth, # Matrix columns: m1, m2, b1, b2, k1, k2, k12, F1 Zmatrix.append([-w**2*np.real(ZZ1), 0, -w*np.imag(ZZ1), 0, np.real(ZZ1), 0, np.real(ZZ1)-np.real(ZZ2), -1]) Zmatrix.append([-w**2*np.imag(ZZ1), 0, w*np.real(ZZ1), 0, np.imag(ZZ1), 0, np.imag(ZZ1)-np.imag(ZZ2), 0]) - if forceboth: - Zmatrix.append([0, -w**2*np.real(ZZ2), 0, -w*np.imag(ZZ2), 0, np.real(ZZ2), np.real(ZZ2)-np.real(ZZ1), -1]) - else: - Zmatrix.append([0, -w**2*np.real(ZZ2), 0, -w*np.imag(ZZ2), 0, np.real(ZZ2), np.real(ZZ2)-np.real(ZZ1), 0]) + Zmatrix.append([0, -w**2*np.real(ZZ2), 0, -w*np.imag(ZZ2), 0, np.real(ZZ2), np.real(ZZ2)-np.real(ZZ1), ff]) Zmatrix.append([0, -w**2*np.imag(ZZ2), 0, w*np.real(ZZ2), 0, np.imag(ZZ2), np.imag(ZZ2)-np.imag(ZZ1), 0]) #display(Zmatrix) return np.array(Zmatrix, dtype=dtype) -"""ZmatrixMONOMER(df) will return a matrix for svd for any number of frequency measurements, +"""ZmatrixMONOMER(df,...) 
will return a matrix for svd for any number of frequency measurements, listed in each row of the dataframe measurementdf parameternames = ['m1', 'b1', 'k1', 'Driving Force'] """ @@ -55,7 +61,7 @@ def ZmatrixMONOMER(measurementdf, #display(Zmatrix) return np.array(Zmatrix, dtype=dtype) -def Zmat(measurementdf, MONOMER, forceboth, +def Zmat(measurementdf, MONOMER, forceboth, frequencycolumn = 'drive', complexamplitude1 = 'R1AmpCom', complexamplitude2 = 'R2AmpCom', dtype=complex, ): if MONOMER: @@ -71,7 +77,7 @@ def Zmat(measurementdf, MONOMER, forceboth, """ 1d nullspace normalization """ def normalize_parameters_1d_by_force(unnormalizedparameters, F_set): - # parameters vector: 'm1', 'm2', 'b1', 'b2', 'k1', 'k2','c12', 'Driving Force' + # parameters vector: 'm1', 'm2', 'b1', 'b2', 'k1', 'k2','k12', 'Driving Force' c = F_set / unnormalizedparameters[-1] parameters = [c*unnormalizedparameters[k] for k in range(len(unnormalizedparameters)) ] return parameters @@ -350,4 +356,69 @@ def normalize_parameters_assuming_3d(vh, vals_set, MONOMER, known1 = None, known if verbose: print('Parameters 3D: ') print(parameters) - return parameters, coefa, coefb, coefc \ No newline at end of file + return parameters, coefa, coefb, coefc + + + +'''Below, I (Lydia) am practicing using the code for monomer and dimer. +I used the simulation to find complex amplitudes from specific frequencies. +''' + +#MONOMER +#parameters used: m=4, b=0.01, k=16 f=1 +#what is measurementdf +#created my own dataframe, but you can import a file for this + +import pandas as pd +datamono = { + 'drive' : [2, 2.0025], + 'R1AmpCom': [4.014494396865636+ (1j*-49.678113600020566), -19.98251374031844 + (1j*-9.997497505306109)], + 'R2AmpCom': [0,0] +} +measurementdfmono = pd.DataFrame(datamono) + +#Calculate Zmatrix +#Doesn't matter which kind of system you have, this function will know (monomer or dimer) +monozmatrix = Zmat(measurementdfmono, True, False, + frequencycolumn = 'drive', complexamplitude1 = 'R1AmpCom', complexamplitude2 = 'R2AmpCom', dtype=complex) + +#perform SVD and extract the unnormalized parameters +#U - left singular vectors +#S - singular values +#V - right singular vectors +#Vh - transpose of right singular vectors +U_m, S_m, Vh_m = np.linalg.svd(monozmatrix) +V_m = Vh_m.conj().T + +#need to figure out how to get the smallest value from S and match it to a column of V +#but I just did it manually for now by looking at S +notnormparam_mono = V_m[:,-1] + +#Now, let's normalize them so we can get the real values! + +#setting force equal to force we used in simulation (in this case 1N) +final_mono = normalize_parameters_1d_by_force(notnormparam_mono, 1) + +#[m_1, b_1, k_1, Force] +#and it checks out!! + +#DIMER +#parameters: [k_1, k_2, k_12, b_1, b_2, F, m_1, m_2] +# : [1, 10, 1, 0.1, 0.1, 10, 1, 10] + +datadi = { + 'drive' : [1.9975, 2.0025], + 'R1AmpCom': [-5.06108785444674 + 1j*-0.5176227871244886, -5.010047982899977 + 1j*-0.508414957225015], + 'R2AmpCom': [0.17499161387621584 + 1j*0.019120282594415245, 0.17203785361315663 + 1j*0.018655133040042425] +} +measurementdfdi = pd.DataFrame(datadi) +dizmatrix = Zmat(measurementdfdi, False, False, frequencycolumn='drive', complexamplitude1 = 'R1AmpCom', complexamplitude2 = 'R2AmpCom', dtype=complex,) +U_d, S_d, Vh_d = np.linalg.svd(dizmatrix) +V_d = Vh_d.conj().T +notnormparam_di = V_d[:,-1] +final_di = normalize_parameters_1d_by_force(notnormparam_di,10) +#print(final_di) +#['m1', 'm2', 'b1', 'b2', 'k1', 'k2','k12', 'Driving Force'] +#checks out!! 
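On the "get the smallest value from S and match it to a column of V" note above: np.linalg.svd returns the singular values in descending order, and row i of Vh (equivalently column i of V = Vh.conj().T) is the right-singular vector belonging to S[i], so the kernel candidate can be selected programmatically rather than by inspection. A short sketch reusing monozmatrix and normalize_parameters_1d_by_force from the monomer example above:

import numpy as np

U_m, S_m, Vh_m = np.linalg.svd(monozmatrix)

idx = np.argmin(S_m)              # smallest singular value; with numpy's ordering this is the last index
kernel_vec = Vh_m[idx].conj()     # row idx of Vh is column idx of V
assert np.allclose(kernel_vec, Vh_m.conj().T[:, idx])

final_mono = normalize_parameters_1d_by_force(kernel_vec, 1)   # scale so the recovered force is 1 N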
+ + diff --git a/README.md b/README.md new file mode 100644 index 0000000..6ec6911 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +The related publication is: https://www.nature.com/articles/s41598-023-50089-1. Please cite it if you use this code! diff --git a/helperfunctions.py b/helperfunctions.py index 9fbc5cb..a95fe07 100644 --- a/helperfunctions.py +++ b/helperfunctions.py @@ -65,12 +65,38 @@ def read_params(vect, MONOMER): return [M1, M2, B1, B2, K1, K2, K12, FD] def savefigure(savename): - plt.savefig(savename + '.svg', dpi = 600, bbox_inches='tight') - plt.savefig(savename + '.pdf', dpi = 600, bbox_inches='tight') - plt.savefig(savename + '.png', dpi = 600, bbox_inches='tight',) + try: + plt.savefig(savename + '.svg', dpi = 600, bbox_inches='tight', transparent=True) + except: + print('Could not save svg') + try: + plt.savefig(savename + '.pdf', dpi = 600, bbox_inches='tight', transparent=True) + # transparent true source: https://jonathansoma.com/lede/data-studio/matplotlib/exporting-from-matplotlib-to-open-in-adobe-illustrator/ + except: + print('Could not save pdf') + plt.savefig(savename + '.png', dpi = 600, bbox_inches='tight', transparent=True) print("Saved:\n", savename + '.png') +def calc_error_interval(resultsdf, resultsdfmean, groupby, fractionofdata = .95): + for column in ['E_lower_1D', 'E_upper_1D','E_lower_2D', 'E_upper_2D','E_lower_3D', 'E_upper_3D']: + resultsdfmean[column] = np.nan + dimensions = ['1D', '2D', '3D'] + items = resultsdfmean[groupby].unique() + + for item in items: + for D in dimensions: + avgerr = resultsdf[resultsdf[groupby]== item]['avgsyserr%_' + D] + avgerr = np.sort(avgerr) + halfalpha = (1 - fractionofdata)/2 + ## literally select the 95% fraction by tossing out the top 2.5% and the bottom 2.5% + ## For 95%, It's ideal if I do 40*N measurements for some integer N. + lowerbound = np.mean([avgerr[int(np.floor(halfalpha*len(avgerr)))], avgerr[int(np.ceil(halfalpha*len(avgerr)))]]) + upperbound = np.mean([avgerr[-int(np.floor(halfalpha*len(avgerr))+1)],avgerr[-int(np.ceil(halfalpha*len(avgerr))+1)]]) + resultsdfmean.loc[resultsdfmean[groupby]== item,'E_lower_'+ D] = lowerbound + resultsdfmean.loc[resultsdfmean[groupby]== item,'E_upper_' + D] = upperbound + return resultsdf, resultsdfmean + def beep(): try: winsound.PlaySound(r'C:\Windows\Media\Speech Disambiguation.wav', flags = winsound.SND_ASYNC) diff --git a/resonator_plotting.py b/resonator_plotting.py index af16147..d056311 100644 --- a/resonator_plotting.py +++ b/resonator_plotting.py @@ -71,8 +71,9 @@ def set_format(): 'size' : 7} mpl.rc('font', **font) plt.rcParams.update({'font.size': 7}) ## Nature Physics wants font size 5 to 7. 
- - + #plt.rcParams.update({ + # "pdf.use14corefonts": True # source: https://github.com/matplotlib/matplotlib/issues/21893 + #}) # findfont: Generic family 'sans-serif' not found because none of the following families were found: Arial #plt.rcParams["length"] = 3 plt.rcParams['axes.linewidth'] = 0.7 @@ -89,11 +90,31 @@ def set_format(): plt.rcParams['ytick.minor.visible'] = True plt.rcParams['xtick.minor.visible'] = True + plt.minorticks_on() plt.rcParams['axes.spines.top'] = True plt.rcParams['axes.spines.right'] = True # source: https://physicalmodelingwithpython.blogspot.com/2015/06/making-plots-for-publication.html plt.rcParams['pdf.fonttype'] = 42 # Don't outline text for NPhys plt.rcParams['svg.fonttype'] = 'none' + + plt.rcParams['axes.titlepad'] = -5 + + plt.rcParams['pdf.fonttype']=42 + # source: Nature https://drive.google.com/drive/folders/15m_c_ZfP2X4C9G7bOtQBdSlcLmJkUA7D + plt.rcParams['ps.fonttype'] = 42 + # source: https://jonathansoma.com/lede/data-studio/matplotlib/exporting-from-matplotlib-to-open-in-adobe-illustrator/ + +def text_color_legend(**kwargs): + l = plt.legend(**kwargs) + # set text color in legend + for text in l.get_texts(): + if '1D' in str(text): + text.set_color(co1) + elif '2D' in str(text): + text.set_color(co2) + elif '3D' in str(text): + text.set_color(co3) + return l """ Plot amplitude or phase versus frequency with set values, simulated data, and SVD results. Demo: if true, plot without tick marks """ @@ -204,6 +225,9 @@ def plotcomplex(complexZ, parameter, title = 'Complex Amplitude', cbar_label='Fr set_format() assert len(complexZ) == len(parameter) plt.sca(ax) + plt.axvline(0, color = 'k', linestyle='solid', linewidth = .5) + plt.axhline(0, color = 'k', linestyle='solid', linewidth = .5) + # colorful circles sc = ax.scatter(np.real(complexZ), np.imag(complexZ), s=s, c = parameter, cmap = cmap, label = 'simulated data' ) # s is marker size cbar = plt.colorbar(sc) @@ -213,11 +237,6 @@ def plotcomplex(complexZ, parameter, title = 'Complex Amplitude', cbar_label='Fr ax.set_ylabel('$\mathrm{Im}(Z)$ (m)') ax.axis('equal'); plt.title(title) - plt.gcf().canvas.draw() # draw so I can get xlim and ylim. 
- ymin, ymax = ax.get_ylim() - xmin, xmax = ax.get_xlim() - plt.vlines(0, ymin=ymin, ymax = ymax, colors = 'k', linestyle='solid', alpha = .5) - plt.hlines(0, xmin=xmin, xmax = xmax, colors = 'k', linestyle='solid', alpha = .5) #ax.plot([0,1],[0,0], lw=10,transform=ax.xaxis.get_transform() )#,transform=ax.xaxis.get_transform() ) #transform=ax.transAxes # label markers that are closest to the desired frequencies @@ -297,10 +316,10 @@ def plot_SVD_results(drive,R1_amp,R1_phase,R2_amp,R2_phase, measurementdf, K1, figsize = (figwidth*.6, figratio * figwidth*.8 ) else: figsize = (figwidth, figratio * figwidth ) - s = 3 + s = 25 # increased from 3, 2022-12-29 bigcircle = 30 amplabel = '$A\;$(m)' - phaselabel = '$\delta\;(\pi)$' + phaselabel = '$\phi\;(\pi)$' titleR1 = '' titleR2 = '' else: @@ -311,7 +330,7 @@ def plot_SVD_results(drive,R1_amp,R1_phase,R2_amp,R2_phase, measurementdf, K1, s=50 bigcircle = 150 amplabel = 'Amplitude $A$ (m)\n' - phaselabel = 'Phase $\delta$ ($\pi$)' + phaselabel = 'Phase $\phi$ ($\pi$)' titleR1= 'Simulated R1 Spectrum' titleR2 = 'Simulated R2 Spectrum' if demo: # overwrite all these diff --git a/resonatorfrequencypicker.py b/resonatorfrequencypicker.py index f293b09..3a13f2e 100644 --- a/resonatorfrequencypicker.py +++ b/resonatorfrequencypicker.py @@ -169,6 +169,16 @@ def create_drive_arrays(vals_set, MONOMER, forceboth, n=n, else: m = int((n-3-(fracevenfreq*n))/2) + morefrequencies = list(np.sort(morefrequencies)) + while morefrequencies[-1] > maxfreq: + if False: # too verbose! + print('Removing frequency', morefrequencies[-1]) + morefrequencies = morefrequencies[:-1] + while morefrequencies[0]< minfreq: + if False: + print('Removing frequency', morefrequencies[0]) + morefrequencies = morefrequencies[1:] + phaseR1 = theta1(morefrequencies, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, MONOMER, forceboth=forceboth) @@ -279,6 +289,7 @@ def res_freq_numeric(vals_set, MONOMER, forceboth, minfreq=.1, maxfreq=5, morefrequencies=None, includefreqs = [], unique = True, veryunique = True, numtoreturn = None, verboseplot = False, plottitle = None, verbose=verbose, iterations = 1, + use_R2_only = False, returnoptions = False): if verbose: @@ -331,6 +342,8 @@ def res_freq_numeric(vals_set, MONOMER, forceboth, print('indexlist:', indexlist) if max(indexlist) > len(morefrequencies): print('len(morefrequencies):', len(morefrequencies)) + print('morefrequencies:', morefrequencies) + print('indexlist:', indexlist) print('Repeating with finer frequency mesh around frequencies:', morefrequencies[np.sort(indexlist)]) assert min(morefrequencies) >= minfreq @@ -345,6 +358,9 @@ def res_freq_numeric(vals_set, MONOMER, forceboth, try: spacing = abs(morefrequenciesprev[index] - morefrequenciesprev[index-1]) except: + if verbose: + print('morefrequenciesprev:',morefrequenciesprev) + print('index:', index) spacing = abs(morefrequenciesprev[index+1] - morefrequenciesprev[index]) finerlist = np.linspace(max(minfreq,morefrequenciesprev[index]-spacing), min(maxfreq,morefrequenciesprev[index] + spacing), @@ -356,7 +372,7 @@ def res_freq_numeric(vals_set, MONOMER, forceboth, while morefrequencies[-1] > maxfreq: - if False: + if False: # too verbose! 
print('Removing frequency', morefrequencies[-1]) morefrequencies = morefrequencies[:-1] while morefrequencies[0]< minfreq: @@ -384,23 +400,26 @@ def res_freq_numeric(vals_set, MONOMER, forceboth, ## find maxima index1 = np.argmax(R1_amp_noiseless) - indexlist1, heights = find_peaks(R1_amp_noiseless, height=.015, distance = 5) - if debug: - print('index1:', index1) - print('indexlist1:',indexlist1) - print('heights', heights) - plt.axvline(morefrequencies[index1]) - for i in indexlist1: - plt.axvline(morefrequencies[i]) - assert index1 <= len(morefrequencies) - if len(indexlist1)>0: - assert max(indexlist1) <= len(morefrequencies) + if not MONOMER and not use_R2_only: + indexlist1, heights = find_peaks(R1_amp_noiseless, height=.015, distance = 5) + if debug: + print('index1:', index1) + print('indexlist1:',indexlist1) + print('heights', heights) + plt.axvline(morefrequencies[index1]) + for i in indexlist1: + plt.axvline(morefrequencies[i]) + assert index1 <= len(morefrequencies) + if len(indexlist1)>0: + assert max(indexlist1) <= len(morefrequencies) + else: + print('Warning: find_peaks on R1_amp returned indexlist:', indexlist1) + plt.figure() + plt.plot(R1_amp_noiseless) + plt.xlabel(R1_amp_noiseless) + plt.figure() else: - print('Warning: find_peaks on R1_amp returned indexlist:', indexlist1) - plt.figure() - plt.plot(R1_amp_noiseless) - plt.xlabel(R1_amp_noiseless) - plt.figure() + indexlist1 = [] if MONOMER: indexlist2 = [] else: @@ -417,7 +436,7 @@ def res_freq_numeric(vals_set, MONOMER, forceboth, indexlistampR1 = np.append(indexlist1,index1) assert max(indexlistampR1) <= len(morefrequencies) - if False: + if False: # too verbose! print('indexlistampR1:', indexlistampR1) if MONOMER: indexlist = indexlistampR1 @@ -434,7 +453,7 @@ def res_freq_numeric(vals_set, MONOMER, forceboth, assert max(indexlist) <= len(morefrequencies) indexlist = list(np.unique(indexlist)) - + indexlist = [int(index) for index in indexlist] first = False ## Check to see if findpeaks just worked @@ -448,7 +467,7 @@ def res_freq_numeric(vals_set, MONOMER, forceboth, if returnoptions: return opt2freqlist, 2 return opt2freqlist - if len(indexlist1) == 2: + if len(indexlist1) == 2 and not use_R2_only: opt3freqlist = list(np.sort(morefrequencies[indexlist1])) if abs(opt3freqlist[1]-opt3freqlist[0]) > thresh: if verbose: diff --git a/resonatorsimulator.py b/resonatorsimulator.py index ff67c5b..b86b199 100644 --- a/resonatorsimulator.py +++ b/resonatorsimulator.py @@ -2,6 +2,8 @@ """ Created on Tue Aug 9 16:42:36 2022 +Solve equations of motion using Cramer's rule in order to obtain amplitude and phase of each resonator in the network. + @author: vhorowit """ @@ -38,7 +40,7 @@ F = sp.symbols('F', real = True) #driving frequency (leave as variable) -wd = sp.symbols('\omega_d', real = True) +wd = sp.symbols(r'\omega_d', real = True) #symbolically Solve for driving amplitudes and phase using sympy @@ -62,8 +64,10 @@ #Matrices for Cramer's Rule: substitute force vector Fvec=[F,0] for each column in turn (m1 is driven, m2 is not) unknownsmatrix1FF = sp.Matrix([[F, -k12], [F, -wd**2*m2 + 1j*wd*b2 + k2 + k12]]) unknownsmatrix2FF = sp.Matrix([[-wd**2*m1 + 1j*wd*b1 + k1 + k12, F], [-k12, F]]) + #Apply Cramer's Rule to solve for Zvec complexamp1FF, complexamp2FF = (unknownsmatrix1FF.det()/unknownsmatrix.det(), unknownsmatrix2FF.det()/unknownsmatrix.det()) + #Solve for phases for each mass delta1FF = sp.arg(complexamp1FF) # Returns the argument (phase angle in radians) of a complex number. 
delta2FF = sp.arg(complexamp2FF) # sp.re(complexamp2)/sp.cos(delta2) (this is the same thing) @@ -72,12 +76,12 @@ complexamp1monomer = F/(-wd**2*m1 + 1j*wd*b1 + k1) # Don't need Cramer's rule for monomer. deltamono = sp.arg(complexamp1monomer) -### Ampolitude and phase +### Amplitude and phase #Wrap phases for plots wrap1 = (delta1)%(2*sp.pi) wrap2 = (delta2)%(2*sp.pi) -wrapmono = deltamono%(2*sp.pi) +wrapmono = (deltamono)%(2*sp.pi) wrap1FF = (delta1FF)%(2*sp.pi) wrap2FF = (delta2FF)%(2*sp.pi) @@ -119,6 +123,8 @@ """ #lambdify curves using sympy +#c = amplitude (not complex), t = phase +#re and im are the real and imaginary parts of complex number c1 = sp.lambdify((wd, k1, k2, k12, b1, b2, F, m1, m2), amp1) t1 = sp.lambdify((wd, k1, k2, k12, b1, b2, F, m1, m2), wrap1) @@ -147,11 +153,17 @@ #define functions -#curve = amplitude, theta = phase, e = err (i.e. noise) -def curve1(w, k_1, k_2, k_12, b1_, b2_, F_, m_1, m_2, e, MONOMER, forceboth): +#curve = amplitude, theta = phase, e = error (i.e. noise) +#realamp and imamp refer to the real and imaginary parts of the complex amplitude +#for MONOMER and forceboth, you would enter True or False +#forceboth means there are forces on both masses of the dimer +#to do a trimer, code needs to be added ofc. And you could forceone, forceboth or forcethree +#w takes in a list of frequencies + +def curve1(w, k_1, k_2, k_12, b1_, b2_, F_, m_1, m_2, e, MONOMER, forceboth): with np.errstate(divide='ignore'): if MONOMER: - return c1mono(np.array(w), k_1, b1_, F_, m_1) + e + return c1mono(np.array(w), k_1, b1_, F_, m_1) + e #why np.array(w) else: # dimer if forceboth: return c1FF(np.array(w), k_1, k_2, k_12, b1_, b2_, F_, m_1, m_2) + e @@ -202,6 +214,7 @@ def imamp1(w, k_1, k_2, k_12, b1_, b2_, F_, m_1, m_2, e, MONOMER, forceboth): else: return im1(np.array(w), k_1, k_2, k_12, b1_, b2_, F_, m_1, m_2) + e +#MONOMER = False here because there would only be one complex # and thus one re and one im for a monomer def realamp2(w, k_1, k_2, k_12, b1_, b2_, F_, m_1, m_2, e, forceboth, MONOMER = False): with np.errstate(divide='ignore'): if forceboth: @@ -216,7 +229,11 @@ def imamp2(w, k_1, k_2, k_12, b1_, b2_, F_, m_1, m_2, e, forceboth, MONOMER = Fa else: return im2(w, k_1, k_2, k_12, b1_, b2_, F_, m_1, m_2) + e -## Monomer: +## Monomer: + # Could use this or could use functions above and just specify MONOMER = True. + # Note: you would just put something like 0 for all the known parameters that + # don't apply to a monomer (like m_2, k_2) + def curvemono(w, k_1, b1_, F_, m_1, e): with np.errstate(divide='ignore'): return c1mono(np.array(w), k_1, b1_, F_, m_1) + e @@ -235,14 +252,16 @@ def imampmono(w, k_1, b1_, F_, m_1, e): """ calculate rsqrd in polar and cartesian - using either the vals_set (privileged rsqrd) or the parameters from SVD (experimental rsqrd) """ + using either the vals_set (privileged rsqrd) or the parameters from SVD (experimental rsqrd) + rsqrd is the Coefficient of Determination. 
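    Here "privileged" means the R^2 of the noisy spectra against curves generated from the set
    (true) parameter values, while "experimental" means R^2 against curves generated from the
    SVD-recovered parameters. Each is evaluated separately for the amplitude, phase, real-part,
    and imaginary-part spectra (R^2 = 1 - SSres/SStot, see rsqrd in resonatorstats.py), and the
    R2 entries are returned as np.nan for a monomer.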
+ """ def rsqrdlist(R1_amp, R1_phase, R2_amp, R2_phase, R1_real_amp, R1_im_amp, R2_real_amp, R2_im_amp, drive, k1, k2, k12, b1, b2, F, m1, m2, MONOMER, forceboth): R1_amp_rsqrd = rsqrd(model = curve1(drive, k1, k2, k12, b1, b2, F, m1, m2,0 , MONOMER, forceboth = forceboth), data = R1_amp) R1_phase_rsqrd = rsqrd(model = theta1(drive, k1, k2, k12, b1, b2, F, m1, m2,0 , MONOMER, forceboth = forceboth), data = R1_phase) - if MONOMER: + if MONOMER: #np.nan - not a number (b/c a monomer only has one complex amplitude) R2_amp_rsqrd = np.nan R2_phase_rsqrd = np.nan else: @@ -268,6 +287,7 @@ def rsqrdlist(R1_amp, R1_phase, R2_amp, R2_phase, R1_real_amp, R1_im_amp, R2_rea """ maxamp is the maximum amplitude, probably the amplitude at the resonance peak. Returns arclength in same units as amplitude. +Not used. """ def arclength_between_pair(maxamp, Z1, Z2): radius = maxamp/2 # radius of twirl, approximating it as a circle @@ -306,27 +326,7 @@ def arclength_between_pair(maxamp, Z1, Z2): # calculate signed arclength s = r*theta return s, theta, r - -#define noise (randn(n,) gives a array of normally-distributed random numbers of size n) -# legacy values from before I implemented use_complexnoise. Hold on to them; Brittany was thoughtful about choosing these. -def amp1_noise(n, noiselevel): - global amplitudenoisefactor1 - amplitudenoisefactor1 = 0.005 - return noiselevel* amplitudenoisefactor1 * np.random.randn(n,) -def phase1_noise(n, noiselevel): - global phasenoisefactor1 - phasenoisefactor1 = 0.1 - return noiselevel* phasenoisefactor1 * np.random.randn(n,) -def amp2_noise(n, noiselevel): - global amplitudenoisefactor2 - amplitudenoisefactor2 = 0.0005 - return noiselevel* amplitudenoisefactor2 * np.random.randn(n,) -def phase2_noise(n, noiselevel): - global phasenoisefactor2 - phasenoisefactor2 = 0.2 - return noiselevel* phasenoisefactor2 * np.random.randn(n,) - -# This is the one I'm actually using + def complex_noise(n, noiselevel): global complexamplitudenoisefactor complexamplitudenoisefactor = 0.0005 @@ -335,9 +335,13 @@ def complex_noise(n, noiselevel): ## Calculate the amplitude and phase as spectra, possibly adding noise def calculate_spectra(drive, vals_set, noiselevel, MONOMER, forceboth): [m1_set, m2_set, b1_set, b2_set, k1_set, k2_set, k12_set, F_set] = read_params(vals_set, MONOMER) - - n = len(drive) + + try: + n = len(drive) + except TypeError: + n = drive.size + #usenoise and use_complexnoise are already set to True at the beginning of the code if usenoise: # add a random vector of positive and negative numbers to the curve. if use_complexnoise: # apply noise in cartesian coordinates @@ -387,6 +391,7 @@ def calculate_spectra(drive, vals_set, noiselevel, MONOMER, forceboth): R2_real_amp = np.real(R2_complexamp) R2_im_amp = np.imag(R2_complexamp) + #this is for no noise else: ## This won't work later when I expand the drive list but I use it as a sanity check. 
R1_amp_noiseless = curve1(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, MONOMER, forceboth=forceboth) R1_phase_noiseless = theta1(drive, k1_set, k2_set, k12_set, b1_set, b2_set, F_set, m1_set, m2_set, 0, MONOMER, forceboth=forceboth) @@ -538,7 +543,7 @@ def SNRs(freqs,vals_set, noiselevel, MONOMER, forceboth, use_complexnoise=use_co return max(SNR_R1_list),max(SNR_R2_list),min(SNR_R1_list),min(SNR_R2_list), \ np.mean(SNR_R1_list),np.mean(SNR_R2_list), SNR_R1_list, SNR_R2_list -""" Experimentalist style to determine SNR """ +""" Experimentalist style to determine SNR, not used because I have a priori privilege """ def SNRcalc(freq,vals_set, noiselevel, MONOMER, forceboth, plot = False, ax = None, detailed = False): n = 50 # number of randomized values to calculate amps1 = np.zeros(n) @@ -569,4 +574,107 @@ def SNRcalc(freq,vals_set, noiselevel, MONOMER, forceboth, plot = False, ax = No # SNR, SNR, signal, noise, signal, noise return SNR_R1,SNR_R2, np.mean(amps1), np.std(amps1), np.mean(amps2), np.std(amps2) else: - return SNR_R1,SNR_R2 \ No newline at end of file + return SNR_R1,SNR_R2 + + +""" Below, I (Lydia) am practicing using the data to make graphs. + This is a helpful teaching tool. + Comment and uncomment sections to see the graph produced +""" + +#Making graphs for the monomer! Used example m, k, b, and f +#Note, the range of frequency matters +# freqs = np.linspace(1.99, 2.01, num=100) +# amps1 = curve1(freqs, 16, 0, 0, 0.01, 0, 1, 4, 0, 0, True, False) +# phase1 = theta1(freqs, 16, 0, 0, 0.01, 0, 1, 4, 0, 0, True, False) +# realpart = realamp1(freqs, 16, 0, 0, 0.01, 0, 1, 4, 0, 0, True, False) +# impart = imamp1(freqs, 16, 0, 0, 0.01, 0, 1, 4, 0, 0, True, False) + +#finding the maximum amplitude and the complex amplitude associated with it +# maxamp = max(amps1) +# maxamp_index = np.argmax(amps1) +# corresponding_freq = freqs[maxamp_index] +# real_atmax1 = realamp1(corresponding_freq, 16, 0, 0, 0.01, 0, 1, 4, 0, 0, True, False) +# im_atmax1 = imamp1(corresponding_freq, 16, 0, 0, 0.01, 0, 1, 4, 0, 0, True, False) + +#Create one plot for both amps vs freqs and phases vs freqs +# fig, ax1 = plt.subplots() +# ax1.plot(freqs, amps1,'r-', label='Amplitude') +# ax1.set_xlabel('Frequency') +# ax1.set_ylabel('Amplitude') +# ax2 = ax1.twinx() +# ax2.plot(freqs, phase1,'b-', label='Phase') +# ax2.set_ylabel('Phase') +# ax1.legend(loc='upper right') +# ax2.legend(loc='center right') + +#plot on complex plane +# plt.plot(realpart, impart, 'g-') +# plt.xlabel('Re(Z)') +# plt.ylabel('Im(Z)') +# plt.axis('equal') + +#Making graphs for the dimer! 
Used example m, k, b, and f +# freqs = np.linspace(0.5, 2, num=500) +# amps1 = curve1(freqs, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False, False) +# amps2 = curve2(freqs, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False) +# phase1 = theta1(freqs, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False, False) +# phase2 = theta2(freqs, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False) +# realpart1 = realamp1(freqs, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False, False) +# impart1 = imamp1(freqs, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False, False) +# realpart2 = realamp2(freqs, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False) +# impart2 = imamp2(freqs, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False) + +#getting info to use in NetMAP +# w_1 = 1.9975 +# w_2 = 2.0025 +# realpart11 = realamp1(w_1, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False, False) +# impart11 = imamp1(w_1, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False, False) +# realpart12 = realamp2(w_1, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False) +# impart12 = imamp2(w_1, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False) + +# realpart21 = realamp1(w_2, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False, False) +# impart21 = imamp1(w_2, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False, False) +# realpart22 = realamp2(w_2, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False) +# impart22 = imamp2(w_2, 1, 10, 1, 0.1, 0.1, 10, 1, 10, 0, False) + +#print(realpart11, impart11, realpart12, impart12, realpart21, impart21, realpart22, impart22) + +#Z_1 - amplitude and phase vs frequency +# fig, ax1 = plt.subplots() +# ax1.plot(freqs, amps1,'r-', label='Amplitude') +# ax1.set_xlabel('Frequency') +# ax1.set_ylabel('Amplitude') +# ax2 = ax1.twinx() +# ax2.plot(freqs, phase1,'b-', label='Phase') +# ax2.set_ylabel('Phase') +# ax1.legend(loc='upper right') +# ax2.legend(loc='center right') +# ax1.set_title('$Z_1(w)$') + +#Z_1 - complex plane +# plt.plot(realpart1, impart1, 'go', linestyle='dashed') +# plt.xlabel('Re(Z)') +# plt.ylabel('Im(Z)') +# plt.title('$Z_1(w)$') + +#Z_2 - amplitude and phase vs frequency +# fig, ax1 = plt.subplots() +# ax1.plot(freqs, amps2,'r-', label='Amplitude') +# ax1.set_xlabel('Frequency') +# ax1.set_ylabel('Amplitude') +# ax2 = ax1.twinx() +# ax2.plot(freqs, phase2,'b-', label='Phase') +# ax2.set_ylabel('Phase') +# ax1.legend(loc='upper right') +# ax2.legend(loc='center right') +# ax1.set_title('$Z_2(w)$') + +#Z_2 - complex plane +# plt.plot(realpart2, impart2, 'go', linestyle='dashed') +# plt.xlabel('Re(Z)') +# plt.ylabel('Im(Z)') +# plt.title('$Z_2(w)$') + + + diff --git a/resonatorstats.py b/resonatorstats.py index 75cb6c0..13ee085 100644 --- a/resonatorstats.py +++ b/resonatorstats.py @@ -7,9 +7,12 @@ import numpy as np import matplotlib.pyplot as plt +import warnings def syserr(x_found,x_set, absval = True): - se = 100*(x_found-x_set)/x_set + with warnings.catch_warnings(): + warnings.simplefilter('ignore') + se = 100*(x_found-x_set)/x_set if absval: return abs(se) else: @@ -42,6 +45,10 @@ def combinedsyserr(syserrs, notdof): # notdof = not degrees of freedom, meaning return avg, rms, max(abssyserrs), Lavg +""" +This definition of R^2 can come out negative. +Negative means that a flat line would fit the data better than the curve. 
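A minimal numeric illustration (hypothetical values, not from any dataset): for
data = [3., 1., 2.] and model = [1., 2., 3.], SSres = (3-1)^2 + (1-2)^2 + (2-3)^2 = 6
while SStot = (3-2)^2 + (1-2)^2 + (2-2)^2 = 2 about the data mean of 2, so
R^2 = 1 - 6/2 = -2: the flat mean line fits these data better than the model does.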
+""" def rsqrd(model, data, plot=False, x=None, newfigure = True): SSres = sum((data - model)**2) SStot = sum((data - np.mean(data))**2) diff --git a/sim_series_of_experiments.py b/sim_series_of_experiments.py index af0719e..71f616f 100644 --- a/sim_series_of_experiments.py +++ b/sim_series_of_experiments.py @@ -23,9 +23,11 @@ def vary_num_p_with_fixed_freqdiff(vals_set, noiselevel, max_num_p = 10, n = 100, # number of frequencies for R^2 freqdiff = .1,just_res1 = False, repeats = 100, - verbose = False,recalculate_randomness=True ): - if verbose: - print('Running vary_num_p_with_fixed_freqdiff()') + verbose = False,recalculate_randomness=True, use_R2_only = False, + **kwargs + ): + if True: + print('Running vary_num_p_with_fixed_freqdiff() with max of', max_num_p, 'freqs.' ) [m1_set, m2_set, b1_set, b2_set, k1_set, k2_set, k12_set, F_set] = read_params(vals_set, MONOMER) @@ -34,10 +36,12 @@ def vary_num_p_with_fixed_freqdiff(vals_set, noiselevel, else: numtoreturn = 2 - ## To be fair for each, I use 3 iterations to really nail down the highest amplitudes. - reslist = res_freq_numeric(vals_set=vals_set, MONOMER=MONOMER,forceboth=forceboth, - mode = 'amp', iterations = 3, includefreqs = reslist, - unique = True, veryunique = True, numtoreturn = numtoreturn, verboseplot = False, verbose=verbose) + for i in range(5):## To be fair for each, I use iterations to really nail down the highest amplitudes. + reslist = res_freq_numeric(vals_set=vals_set, MONOMER=MONOMER,forceboth=forceboth, + mode = 'amp', iterations = 3, includefreqs = reslist, + unique = True, veryunique = True, numtoreturn = numtoreturn, + use_R2_only = use_R2_only, + verboseplot = False, verbose=verbose) ## measure the top two resonant frequencies res1 = reslist[0] if not MONOMER: @@ -68,8 +72,10 @@ def vary_num_p_with_fixed_freqdiff(vals_set, noiselevel, noiselevel=noiselevel, MONOMER=MONOMER, forceboth=forceboth) for this_num_p in range(2, max_num_p+1): - if this_num_p == max_num_p and y == 0: + if y == 0 and (this_num_p == max_num_p or this_num_p == 2): # first time with 2 or all the frequencies verbose = True + else: + verbose = False ## Do we recalculate the spectra every time or use the same datapoints as before? (This is slower.) 
if recalculate_randomness: @@ -84,9 +90,13 @@ def vary_num_p_with_fixed_freqdiff(vals_set, noiselevel, p = freqpoints(desiredfreqs = desiredfreqs, drive = drive) - thisres = simulated_experiment(drive[p], drive=drive,vals_set = vals_set, noiselevel=noiselevel, MONOMER=MONOMER, + thisres, plot_info_1D = simulated_experiment(drive[p], drive=drive,vals_set = vals_set, noiselevel=noiselevel, MONOMER=MONOMER, repeats=1 , verbose = verbose, forceboth=forceboth,labelcounts = False, - noiseless_spectra=noiseless_spectra, noisy_spectra = noisy_spectra) + noiseless_spectra=noiseless_spectra, noisy_spectra = noisy_spectra, + return_1D_plot_info = True, + **kwargs + ) + try: # repeated experiments results resultsdf = pd.concat([resultsdf,thisres], ignore_index=True) @@ -94,4 +104,4 @@ def vary_num_p_with_fixed_freqdiff(vals_set, noiselevel, resultsdf = thisres - return resultsdf \ No newline at end of file + return resultsdf, plot_info_1D \ No newline at end of file diff --git a/simulated_experiment.py b/simulated_experiment.py index e7df283..42d65dc 100644 --- a/simulated_experiment.py +++ b/simulated_experiment.py @@ -2,7 +2,9 @@ """ Created on Tue Aug 9 16:08:21 2022 -Simulated spectra + SVD recovery +Validating NetMAP: + +Simulated spectra + NetMAP recovery @author: vhorowit """ @@ -12,7 +14,7 @@ import matplotlib.pyplot as plt from helperfunctions import \ read_params, store_params, make_real_iff_real, flatten -from resonatorSVDanalysis import Zmat, \ +from NetMAP import Zmat, \ normalize_parameters_1d_by_force, normalize_parameters_assuming_3d, \ normalize_parameters_to_m1_F_set_assuming_2d from resonatorstats import syserr, combinedsyserr @@ -40,25 +42,28 @@ def describeresonator(vals_set, MONOMER, forceboth, noiselevel = None): print('Applying oscillating force to both masses.') else: print('Applying oscillating force to m1.') - print('Approximate Q1: ' + "{:.2f}".format(approx_Q(k = k1_set, m = m1_set, b=b1_set)) + - ' width: ' + "{:.2f}".format(approx_width(k = k1_set, m = m1_set, b=b1_set))) + print('Q1 ~ ' + "{:.0f}".format(approx_Q(k = k1_set, m = m1_set, b=b1_set)) + + ' and peak width ~ ' + "{:.2f}".format(approx_width(k = k1_set, m = m1_set, b=b1_set)) + ' rad/s') if not MONOMER: - print('Approximate Q2: ' + "{:.2f}".format(approx_Q(k = k2_set, m = m2_set, b=b2_set)) + - ' width: ' + "{:.2f}".format(approx_width(k = k2_set, m = m2_set, b=b2_set))) + print(' Q2 ~ ' + "{:.0f}".format(approx_Q(k = k2_set, m = m2_set, b=b2_set)) + + ' and second peak width: ' + "{:.2f}".format(approx_width(k = k2_set, m = m2_set, b=b2_set))) print('Q ~ sqrt(m*k)/b') - print('Set values:') + print('We set the input values to:') if MONOMER: - print('m: ' + str(m1_set) + ', b: ' + str(b1_set) + ', k: ' + str(k1_set) + ', F: ' + str(F_set)) + print('m = ' + str(m1_set) + ' kg, b = ' + str(b1_set) + ' N s/m, k = ' + str(k1_set) + ' N/m, f = ' + str(F_set), ' N') res1 = res_freq_weak_coupling(k1_set, m1_set, b1_set) - print('res freq: ', res1) + print('res freq ~ ', res1, 'rad/s') else: if forceboth: - forcestr = ', F1=F2: ' + forcestr = ', f1=f2: ' else: - forcestr = ', F1: ' - - print('m1: ' + str(m1_set) + ', b1: ' + str(b1_set) + ', k1: ' + str(k1_set) + forcestr + str(F_set)) - print('m2: ' + str(m2_set) + ', b2: ' + str(b2_set) + ', k2: ' + str(k2_set) + ', k12: ' + str(k12_set)) + forcestr = ', f1 = ' + + print('m_1= ' + str(m1_set) + 'kg, b_1 = ' + str(b1_set) + + 'N s/m, k_1 = ' + str(k1_set) + forcestr + str(F_set)) + print('m_2= ' + str(m2_set) + 'kg, b_2 = ' + str(b2_set) + + 'N s/m, k_2 = ' + 
str(k2_set) + ', k_{12} = ' + str(k12_set)) + if noiselevel is not None and use_complexnoise: print('noiselevel:', noiselevel) print('stdev sigma:', complexamplitudenoisefactor*noiselevel) @@ -168,13 +173,19 @@ def assert_results_length(results, columns): # unscaled_vector = vh[-1] has elements: m1, b1, k1, f1 -def describe_monomer_results(Zmatrix, smallest_s, unscaled_vector, M1, B1, K1, vals_set, absval = False ): +def describe_monomer_results(Zmatrix, smallest_s, unscaled_vector, M1, B1, K1, vals_set, freqs = None, absval = False ): [m1_set, m2_set, b1_set, b2_set, k1_set, k2_set, k12_set, F_set] = read_params(vals_set, True) m_err = syserr(M1,m1_set, absval) b_err = syserr(B1,b1_set, absval) k_err = syserr(K1,k1_set, absval) sqrtkoverm_err = syserr(np.sqrt(K1/M1),np.sqrt(k1_set/m1_set), absval) + if freqs is not None: + print("Using", len(freqs), "frequencies for SVD analysis, namely", + freqs, + "rad/s." ) + + print("The Z matrix is ", make_real_iff_real(Zmatrix), \ ". Its smallest singular value, s_1=", smallest_s, \ ", corresponds to singular vector\n p\\vec\\hat=(m\\hat, b\\hat, k\\hat, F)=α(", \ @@ -197,9 +208,9 @@ def describe_monomer_results(Zmatrix, smallest_s, unscaled_vector, M1, B1, K1, v "% of the correct values for m, b, and k.", \ "We also see that the recovered value √(k ̂/m ̂ )=", np.sqrt(K1/M1), "rad/s is more accurate than the individually recovered values for mass and spring stiffness;", - "this is generally true." - "The percent error for √(k ̂/m ̂ ) is", - sqrtkoverm_err, "%." + "this is generally true. ", + "The percent error for √(k ̂/m ̂ ) compared to √(k_set/m_set ) is", + sqrtkoverm_err, "%. This high accuracy likely arises because we choose frequency ω_a at the peak amplitude." ) @@ -210,7 +221,7 @@ def simulated_experiment(measurementfreqs, vals_set, noiselevel, MONOMER, force noiseless_spectra = None, noisy_spectra = None, freqnoise = False, overlay=False, context = None, saving = False, demo = False, resonatorsystem = None, show_set = None, - figsizeoverride1 = None, figsizeoverride2 = None,): + figsizeoverride1 = None, figsizeoverride2 = None, return_1D_plot_info= False): if verbose: @@ -252,6 +263,7 @@ def simulated_experiment(measurementfreqs, vals_set, noiselevel, MONOMER, force first = True results = [] + plot_info_1D = [] for i in range(repeats): # repeat the same measurement with different gaussian noise theseresults = [] @@ -335,7 +347,11 @@ def simulated_experiment(measurementfreqs, vals_set, noiselevel, MONOMER, force Zmatrix = Zmat(df, frequencycolumn = 'drive', complexamplitude1 = 'R1AmpCom', complexamplitude2 = 'R2AmpCom', MONOMER=MONOMER, forceboth=forceboth, dtype=complex) - u, s, vh = np.linalg.svd(Zmatrix, full_matrices = True) + try: + u, s, vh = np.linalg.svd(Zmatrix, full_matrices = True) + except: + print('Could not solve with noiselevel', noiselevel) + continue vh = make_real_iff_real(vh) theseresults.append(approx_Q(m = m1_set, k = k1_set, b = b1_set)) @@ -375,15 +391,19 @@ def simulated_experiment(measurementfreqs, vals_set, noiselevel, MONOMER, force if verbose and first: print("1D:") if MONOMER: - describe_monomer_results(Zmatrix, s[-1], vh[-1], M1, B1, K1, vals_set) + describe_monomer_results(Zmatrix, s[-1], vh[-1], M1, B1, K1, vals_set, freqs = drive[p]) plot_SVD_results(drive,R1_amp,R1_phase,R2_amp,R2_phase, df, K1, K2, K12, B1, B2, FD, M1, M2, vals_set, MONOMER=MONOMER, forceboth=forceboth, labelcounts = labelcounts, overlay = overlay, context = context, saving = saving, labelname = '1D', demo=demo, resonatorsystem = 
resonatorsystem, show_set = show_set, figsizeoverride1 = figsizeoverride1, figsizeoverride2 = figsizeoverride2) plt.show() + plot_info_1D = [drive,R1_amp,R1_phase,R2_amp,R2_phase, df, K1, K2, K12, B1, B2, FD, M1, M2, vals_set, + MONOMER, forceboth, labelcounts, overlay, + context, saving, '1D', demo, + resonatorsystem, show_set, + figsizeoverride1, figsizeoverride2] - el = store_params(M1, M2, B1, B2, K1, K2, K12, FD, MONOMER) theseresults.append(any(x<0 for x in el)) @@ -632,4 +652,7 @@ def simulated_experiment(measurementfreqs, vals_set, noiselevel, MONOMER, force resultsdf = pd.DataFrame( data=results, columns = flatten(theseresults_cols)) - return resultsdf \ No newline at end of file + if return_1D_plot_info: + return resultsdf, plot_info_1D + else: + return resultsdf diff --git a/trimer/Creating_graphs_with_data.py b/trimer/Creating_graphs_with_data.py new file mode 100644 index 0000000..fec83ce --- /dev/null +++ b/trimer/Creating_graphs_with_data.py @@ -0,0 +1,606 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Tue Oct 22 11:09:41 2024 + +@author: Lydia Bullock +""" + +import pandas as pd +import numpy as np +import matplotlib.pyplot as plt +from Trimer_simulator import re1, re2, re3, im1, im2, im3, c1, t1, c2, t2, c3, t3 +import os +import math + +#Saves graphs +def save_figure(figure, folder_name, file_name): + # Create the folder if it does not exist + if not os.path.exists(folder_name): + os.makedirs(folder_name) + + # Save the figure to the folder + file_path = os.path.join(folder_name, file_name) + figure.savefig(file_path, bbox_inches = 'tight') + plt.close(figure) + +''' Redoing the histogram for 2269 systems with 1 trial ''' + +# #Recall the data from first sheet +# file_path1 = '/Users/Student/Desktop/Summer Research 2024/Curve Fit vs NetMAP/More Systems 1 Trial - Histograms/All_Systems_1_Trial_1.xlsx' +# array_amp_phase1 = pd.read_excel(file_path1, sheet_name = 'Polar').to_numpy() +# array_X_Y1 = pd.read_excel(file_path1, sheet_name = 'Cartesian').to_numpy() +# array_NetMAP1 = pd.read_excel(file_path1, sheet_name = 'NetMAP').to_numpy() + +# #Recall the data from second sheet +# file_path2 = '/Users/Student/Desktop/Summer Research 2024/Curve Fit vs NetMAP/More Systems 1 Trial - Histograms/All_Systems_1_Trial_2.xlsx' +# array_amp_phase2 = pd.read_excel(file_path2, sheet_name = 'Polar').to_numpy() +# array_X_Y2 = pd.read_excel(file_path2, sheet_name = 'Cartesian').to_numpy() +# array_NetMAP2 = pd.read_excel(file_path2, sheet_name = 'NetMAP').to_numpy() + +# #Pull out _bar for each type from first sheet +# amp_phase_error1 = array_amp_phase1[:,50] +# X_Y_error1 = array_X_Y1[:, 50] +# NetMAP_error1 = array_NetMAP1[:,44] + +# #Pull out _bar for each type from first sheet +# amp_phase_error2 = array_amp_phase2[:,50] +# X_Y_error2 = array_X_Y2[:, 50] +# NetMAP_error2 = array_NetMAP2[:,44] + +# #Concatenate +# all_polar_error = np.concatenate((amp_phase_error1, amp_phase_error2)) +# all_NetMAP_error = np.concatenate((NetMAP_error1, NetMAP_error2)) +# almost_all_cartesian_error = np.concatenate((X_Y_error1, X_Y_error2)) +# all_cartesian_error = almost_all_cartesian_error[almost_all_cartesian_error != np.max(almost_all_cartesian_error)] + + +# #Graph histogram of _bar for both curve fits + +# # Compute max of data and set the bin limits so all data is included on graph +# data_max = np.max(np.concatenate((all_cartesian_error, all_polar_error, all_NetMAP_error))) +# if data_max > 39: +# linearbins = np.linspace(0, data_max + 2,50) +# else: +# linearbins = 
np.linspace(0, 40, 50) + +# #Graph linear! +# fig = plt.figure(figsize=(5, 4)) +# plt.xlabel(r'$\overline{\langle e \rangle}$ (%)', fontsize = 16) +# plt.ylabel('Counts', fontsize = 16) +# plt.yticks(fontsize=14) +# plt.xticks(fontsize=14) +# plt.hist(all_cartesian_error , bins = linearbins, alpha=0.5, color='green', label='Cartesian', edgecolor='green', histtype= 'step') +# plt.hist(all_polar_error , bins = linearbins, alpha=0.5, color='blue', label='Polar', edgecolor='blue', histtype= 'step') +# plt.hist(all_NetMAP_error , bins = linearbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red', histtype= 'step') +# plt.legend(loc='best', fontsize = 13) + +# plt.show() +# save_figure(fig, 'Final', ' Bar Lin Hist Total without Largest Value.pdf' ) + +# # Set the bin limits so all data is included on graph +# if data_max > 100: +# logbins = np.logspace(-2, math.log10(data_max)+0.1, 50) +# else: +# logbins = np.logspace(-2, 1.8, 50) + +# #Graph log! +# fig = plt.figure(figsize=(5, 4)) +# plt.xlabel(r'$\overline{\langle e \rangle}$ (%)', fontsize = 16) +# plt.ylabel('Counts', fontsize = 16) +# plt.xscale('log') +# plt.yticks(fontsize=14) +# plt.xticks(fontsize=14) +# plt.hist(all_cartesian_error , bins = logbins, alpha=0.5, color='green', label='Cartesian', edgecolor='green', histtype= 'step', lw = 2) +# plt.hist(all_polar_error , bins = logbins, alpha=0.5, color='blue', label='Polar', edgecolor='blue', histtype= 'step', lw = 2) +# plt.hist(all_NetMAP_error , bins = logbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red', histtype= 'step', lw = 2) +# plt.legend(loc='best', fontsize = 13) + +# plt.show() +# save_figure(fig, 'Final', ' Bar Log Hist Total without Largest Value.pdf' ) + +''' Redoing the histogram for 15 Systems - 10 freqs, better params ''' + +# #Recall the data +# amp_phase_e_bar = np.zeros(15) +# X_Y_e_bar = np.zeros(15) +# NetMAP_e_bar = np.zeros(15) + +# for i in range(15): +# file_path = f'/Users/Student/Desktop/Summer Research 2024/Curve Fit vs NetMAP/15 systems - 10 Freqs NetMAP & Better Parameters/Random_Automated_Guess_{i}.xlsx' +# array_amp_phase = pd.read_excel(file_path, sheet_name = 'Amp & Phase').to_numpy() +# array_X_Y = pd.read_excel(file_path, sheet_name = 'X & Y').to_numpy() +# array_NetMAP = pd.read_excel(file_path, sheet_name = 'NetMAP').to_numpy() + +# #Pull out _bar for each trial and add to list +# amp_phase_e_bar[i] = array_amp_phase[0, 51] +# X_Y_e_bar[i] = array_X_Y[0, 51] +# NetMAP_e_bar[i] = array_NetMAP[0, 45] + +# #Graph! 
+# fig = plt.figure(figsize=(10, 6)) +# linearbins = np.linspace(0,48,50) +# plt.title('Average Systematic Error Across Parameters Then Trials', fontsize = 18) +# plt.xlabel(' (%)', fontsize = 16) +# plt.ylabel('Counts', fontsize = 16) +# plt.xticks(fontsize=14) +# plt.yticks(fontsize=14) +# plt.hist(X_Y_e_bar, bins = linearbins, alpha=0.5, color='green', label='Cartesian (X & Y)', edgecolor='green') +# plt.hist(amp_phase_e_bar, bins =linearbins, alpha=0.5, color='blue', label='Polar (Amp & Phase)', edgecolor='blue') +# plt.hist(NetMAP_e_bar, bins = linearbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red') +# plt.legend(loc='upper right', fontsize = 14) +# plt.show() + +# fig = plt.figure(figsize=(10, 6)) +# logbins = np.logspace(-2,1.5,50) +# plt.title('Average Systematic Error Across Parameters Then Trials', fontsize = 18) +# plt.xlabel(' (%)', fontsize = 16) +# plt.ylabel('Counts', fontsize = 16) +# plt.xscale('log') +# plt.xticks(fontsize=14) +# plt.yticks(fontsize=14) +# plt.hist(X_Y_e_bar, bins = logbins, alpha=0.5, color='green', label='Cartesian (X & Y)', edgecolor='green') +# plt.hist(amp_phase_e_bar, bins = logbins, alpha=0.5, color='blue', label='Polar (Amp & Phase)', edgecolor='blue') +# plt.hist(NetMAP_e_bar, bins = logbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red') +# plt.legend(loc='upper right', fontsize = 14) +# plt.show() + +''' Redoing the histogram for Case Study''' + +#Recall the data +file_path = '/Users/Student/Desktop/Summer Research 2024/Curve Fit vs NetMAP/Case Study - 10 Freqs NetMAP Better Params 1000 Trials/Case_Study_1000_Trials.xlsx' +array_amp_phase = pd.read_excel(file_path, sheet_name = 'Amp & Phase').to_numpy() +array_X_Y = pd.read_excel(file_path, sheet_name = 'X & Y').to_numpy() +array_NetMAP = pd.read_excel(file_path, sheet_name = 'NetMAP').to_numpy() + +#Pull out for each type +amp_phase_error = array_amp_phase[:,50] +X_Y_error = array_X_Y[:, 50] +NetMAP_error = array_NetMAP[:,44] + +#Graph histograms! 
+linearbins = np.linspace(0,15,50) +fig = plt.figure(figsize=(5, 4)) +plt.xlabel(r'$\langle e \rangle$ (%)', fontsize = 16) +plt.ylabel('Counts', fontsize = 16) +plt.yticks(fontsize=14) +plt.xticks(fontsize=14) +plt.hist(X_Y_error, bins = linearbins, alpha=0.5, color='green', label='Cartesian', edgecolor='green') +plt.hist(amp_phase_error, bins=linearbins, alpha=0.5, color='blue', label='Polar', edgecolor='blue') +plt.hist(NetMAP_error, bins = linearbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red') +plt.legend(loc='upper right', fontsize = 13) +plt.show() +save_figure(fig, 'Final', 'Case Study 1000 Lin Err Hist.pdf' ) + +logbins = np.logspace(-2,1.5,50) +fig = plt.figure(figsize=(5, 4)) +plt.xlabel(r'$\langle e \rangle$ (%)', fontsize = 16) +plt.ylabel('Counts', fontsize = 16) +plt.xscale('log') +plt.yticks(fontsize=14) +plt.xticks(fontsize=14) +plt.hist(X_Y_error, bins = logbins, alpha=0.5, color='green', label='Cartesian', edgecolor='green')#, histtype= 'step', lw = 2) +plt.hist(amp_phase_error, bins = logbins, alpha=0.5, color='blue', label='Polar', edgecolor='blue')#, histtype= 'step', lw = 2) +plt.hist(NetMAP_error, bins = logbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red')#, histtype= 'step', lw = 2) +plt.legend(loc='upper right', fontsize = 13) +plt.show() +save_figure(fig, 'Final', 'Case Study 1000 Log Err Hist.pdf' ) + +def nonlinearhistc(X, bins, thresh=3, verbose=False): + map_to_bins = np.digitize(X, bins) - 1 # Adjusting to match zero-indexing + r = np.zeros(len(bins) - 1) # Adjusted to match the number of intervals + + # Populate counts for each bin + for i in map_to_bins: + if 0 <= i < len(r): + r[i] += 1 # count for bin i. + + if verbose: + print(f"Counts per bin: {r}") + + # Normalize by bin width + probabilitydensity = np.zeros(len(bins) - 1) + area = 0 + thinbincount = 0 + + for i in range(len(bins) - 1): # iterate through bins + if r[i] <= 1: + thinbincount += 1 + thisbinwidth = bins[i + 1] - bins[i] + probabilitydensity[i] = r[i] / thisbinwidth + area += probabilitydensity[i] * thisbinwidth # calculate total area + + print('Divide by area to make P dens. Area:', area) + + if thinbincount > thresh: + print(f"Warning: too many bins for data, thinbincount={thinbincount}") + elif verbose: + print(f"thinbincount={thinbincount}") + + # Normalize area + normedprobabilitydensity = [eachPdens / area + for eachPdens in probabilitydensity] + return normedprobabilitydensity, map_to_bins + +# # Graph probability densities +# bins_i_want = np.logspace(-2, 1.5, 100) + +# normprobXY, map_to_bins = nonlinearhistc(X_Y_error, bins_i_want) +# normprobampphase, map_to_bins = nonlinearhistc(amp_phase_error, bins_i_want) +# normprobNetMAP, map_to_bins = nonlinearhistc(NetMAP_error, bins_i_want) + +# plt.figure(figsize=(10, 6)) + +# plt.loglog(bins_i_want[:-1], normprobXY , '.', color='green', alpha = 0.5, label = 'Cartesian') +# plt.loglog(bins_i_want[:-1], normprobampphase ,'.', color='blue', alpha = 0.5, label = 'Polar') +# plt.loglog(bins_i_want[:-1], normprobNetMAP , '.', color='red', alpha = 0.5, label = 'NetMAP') + +# plt.xlabel(' (%)', fontsize=16) +# plt.ylabel('Normalized Probability Density', fontsize=16) +# plt.title('Normalized Probability Density of Average Systematic Error Across Parameters') +# plt.legend(loc='upper center', fontsize = 14) +# plt.show() + + + +'''Creating graphs - one example + Using the case study data (10 freq/better params 1000 trials), trial 1. 
+ ''' + +# #Recall the data +# file_path = '/Users/Student/Desktop/Summer Research 2024/Curve Fit vs NetMAP/Case Study - 10 Freqs NetMAP Better Params 1000 Trials/Case_Study_1000_Trials.xlsx' +# array_amp_phase = pd.read_excel(file_path, sheet_name = 'Amp & Phase').to_numpy() +# array_X_Y = pd.read_excel(file_path, sheet_name = 'X & Y').to_numpy() + +# #True and guessed parameters +# true_params = array_amp_phase[1,:11] +# guess_params = array_amp_phase[1,11:22] +# freq = np.linspace(0.001, 4, 800) +# freq1 = np.linspace(0.001, 4, 700) + +# #The recovered parameters +# recovered_params_amp_phase = array_amp_phase[1,22:33] +# recovered_params_X_Y = array_X_Y[1,22:33] + +# #Error for each parameter from Amp/Phase Plots +# e_k1_amp = array_amp_phase[:, 33] +# e_k2_amp = array_amp_phase[:, 34] +# e_k3_amp = array_amp_phase[:, 35] +# e_k4_amp = array_amp_phase[:, 36] +# e_b1_amp = array_amp_phase[:, 37] +# e_b2_amp = array_amp_phase[:, 38] +# e_b3_amp = array_amp_phase[:, 39] +# e_m1_amp = array_amp_phase[:, 41] +# e_m2_amp = array_amp_phase[:, 42] +# e_m3_amp = array_amp_phase[:, 43] + +# #Error for each parameter from X/Y Plots +# e_k1_XY = array_X_Y[:, 33] +# e_k2_XY = array_X_Y[:, 34] +# e_k3_XY = array_X_Y[:, 35] +# e_k4_XY = array_X_Y[:, 36] +# e_b1_XY = array_X_Y[:, 37] +# e_b2_XY = array_X_Y[:, 38] +# e_b3_XY = array_X_Y[:, 39] +# e_m1_XY = array_X_Y[:, 41] +# e_m2_XY = array_X_Y[:, 42] +# e_m3_XY = array_X_Y[:, 43] + +# #Total error +# err_amp_phase = array_amp_phase[:,50] +# err_X_Y = array_X_Y[:,50] + +# #1 - R^2 values +# amp1_1minusR2 = 1 - array_amp_phase[:,44] +# amp2_1minusR2 = 1 - array_amp_phase[:,45] +# amp3_1minusR2 = 1 - array_amp_phase[:,46] +# phase1_1minusR2 = 1 - array_amp_phase[:,47] +# phase2_1minusR2 = 1 - array_amp_phase[:,48] +# phase3_1minusR2 = 1 - array_amp_phase[:,49] + +# X1_1minusR2 = 1 - array_X_Y[:,44] +# X2_1minusR2 = 1 - array_X_Y[:,45] +# X3_1minusR2 = 1 - array_X_Y[:,46] +# Y1_1minusR2 = 1 - array_X_Y[:,47] +# Y2_1minusR2 = 1 - array_X_Y[:,48] +# Y3_1minusR2 = 1 - array_X_Y[:,49] + +'''Box plots of recovered parameter spread - only 50 trials''' +# plt.boxplot([e_k1_amp, e_k2_amp, e_k3_amp, e_k4_amp, e_b1_amp, e_b2_amp, e_b3_amp, e_m1_amp, e_m2_amp, e_m3_amp], positions=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) +# plt.xticks([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], ['k1', 'k2', 'k3', 'k4', 'b1', 'b2', 'b3', 'm1', 'm2', 'm3']) +# plt.xlabel('Parameters') +# plt.ylabel('Error (%)') +# plt.title('Amplitude and Phase') +# plt.savefig('parameter_box_plot.pdf') +# plt.show() + +''' How does error compare to 1-R^2?''' +#Amp, Phase +# fig = plt.figure(figsize=(16,8)) +# gs = fig.add_gridspec(2, 3, hspace=0.1, wspace=0.1) +# ((ax1, ax2, ax3), (ax4, ax5, ax6)) = gs.subplots(sharex=False, sharey='row') + +# ax1.plot(amp1_1minusR2, err_amp_phase,'ro', alpha=0.5, markersize=5.5) +# ax2.plot(amp2_1minusR2, err_amp_phase,'bo', alpha=0.5, markersize=5.5) +# ax3.plot(amp3_1minusR2, err_amp_phase,'go', alpha=0.5, markersize=5.5) +# ax4.plot(phase1_1minusR2, err_amp_phase,'ro', alpha=0.5, markersize=5.5) +# ax5.plot(phase2_1minusR2, err_amp_phase,'bo', alpha=0.5, markersize=5.5) +# ax6.plot(phase3_1minusR2, err_amp_phase,'go', alpha=0.5, markersize=5.5) + +# ax1.set_title('Amp 1', fontsize=18) +# ax2.set_title('Amp 2', fontsize=18) +# ax3.set_title('Amp 3', fontsize=18) +# ax4.set_title('Phase 1', fontsize=18) +# ax5.set_title('Phase 2', fontsize=18) +# ax6.set_title('Phase 3', fontsize=18) +# ax1.set_ylabel(' (%)', fontsize=16) +# ax4.set_ylabel(' (%)', fontsize=16) +# 
ax4.set_xlabel('1-R^2', fontsize=16) +# ax5.set_xlabel('1-R^2', fontsize=16) +# ax6.set_xlabel('1-R^2', fontsize=16) + +# for ax in [ax1, ax2, ax3, ax4, ax5, ax6]: +# ax.set_xscale('log') +# ax.set_yscale('log') + +# plt.savefig('err_vs_rsquared_amp_phase.pdf') +# plt.show() + +#X and Y +# fig = plt.figure(figsize=(16,8)) +# gs = fig.add_gridspec(2, 3, hspace=0.1, wspace=0.1) +# ((ax1, ax2, ax3), (ax4, ax5, ax6)) = gs.subplots(sharex=False, sharey='row') + +# ax1.plot(X1_1minusR2, err_X_Y,'ro', alpha=0.2, markersize=5.5) +# ax2.plot(X2_1minusR2, err_X_Y,'bo', alpha=0.2, markersize=5.5) +# ax3.plot(X3_1minusR2, err_X_Y,'go', alpha=0.2, markersize=5.5) +# ax4.plot(Y1_1minusR2, err_X_Y,'ro', alpha=0.2, markersize=5.5) +# ax5.plot(Y2_1minusR2, err_X_Y,'bo', alpha=0.2, markersize=5.5) +# ax6.plot(Y3_1minusR2, err_X_Y,'go', alpha=0.2, markersize=5.5) + +# ax1.set_title('X 1', fontsize=18) +# ax2.set_title('X 2', fontsize=18) +# ax3.set_title('X 3', fontsize=18) +# ax4.set_title('Y 1', fontsize=18) +# ax5.set_title('Y 2', fontsize=18) +# ax6.set_title('Y 3', fontsize=18) +# ax1.set_ylabel(' (%)', fontsize=16) +# ax4.set_ylabel(' (%)', fontsize=16) +# ax4.set_xlabel('1-R^2', fontsize=16) +# ax5.set_xlabel('1-R^2', fontsize=16) +# ax6.set_xlabel('1-R^2', fontsize=16) + +# for ax in [ax1, ax2, ax3, ax4, ax5, ax6]: +# ax.set_xscale('log') +# ax.set_yscale('log') + +# plt.savefig('err_vs_rsquared_XY.pdf') +# plt.show() + +# '''Graphing Amp/Phase with addition of complex plots''' +# #Create the true data - not including complex noise (so not using curve1, etc functions) because I didn't save the exact noise +# #for each trial and also this is just for visualization so it doesn't matter so much because I have the recovered parameters regardless and the noise is not noticable on the graph +# Amp1 = c1(freq1, *true_params) +# Phase1 = t1(freq1, *true_params) +# Amp2 = c2(freq1, *true_params) +# Phase2 = t2(freq1, *true_params) +# Amp3 = c3(freq1, *true_params) +# Phase3 = t3(freq1, *true_params) +# X1 = re1(freq1, *true_params) +# Y1 = im1(freq1, *true_params) +# X2 = re2(freq1, *true_params) +# Y2 = im2(freq1, *true_params) +# X3 = re3(freq1, *true_params) +# Y3 = im3(freq1, *true_params) + +# #Create the initial guesses +# Amp1_guess = c1(freq, *guess_params) +# Phase1_guess = t1(freq, *guess_params) +# Amp2_guess = c2(freq, *guess_params) +# Phase2_guess = t2(freq, *guess_params) +# Amp3_guess = c3(freq, *guess_params) +# Phase3_guess = t3(freq, *guess_params) +# X1_guess = re1(freq, *guess_params) +# Y1_guess = im1(freq, *guess_params) +# X2_guess = re2(freq, *guess_params) +# Y2_guess = im2(freq, *guess_params) +# X3_guess = re3(freq, *guess_params) +# Y3_guess = im3(freq, *guess_params) + +# #Create the final fit! 
+# Amp1_fitted = c1(freq, *recovered_params_amp_phase) +# Phase1_fitted = t1(freq, *recovered_params_amp_phase) +# Amp2_fitted = c2(freq, *recovered_params_amp_phase) +# Phase2_fitted = t2(freq, *recovered_params_amp_phase) +# Amp3_fitted = c3(freq, *recovered_params_amp_phase) +# Phase3_fitted = t3(freq, *recovered_params_amp_phase) +# X1_fitted = re1(freq, *recovered_params_X_Y) +# Y1_fitted = im1(freq, *recovered_params_X_Y) +# X2_fitted = re2(freq, *recovered_params_X_Y) +# Y2_fitted = im2(freq, *recovered_params_X_Y) +# X3_fitted = re3(freq, *recovered_params_X_Y) +# Y3_fitted = im3(freq, *recovered_params_X_Y) + +# # Begin graphing for Amp and Phase +# fig = plt.figure(figsize=(16,11)) +# gs = fig.add_gridspec(3, 3, hspace=0.4, wspace=0.05) + +# ax1 = fig.add_subplot(gs[0, 0]) +# ax2 = fig.add_subplot(gs[0, 1], sharex=ax1, sharey=ax1) +# ax3 = fig.add_subplot(gs[0, 2], sharex=ax1, sharey=ax1) +# ax4 = fig.add_subplot(gs[1, 0], sharex=ax1) +# ax5 = fig.add_subplot(gs[1, 1], sharex=ax1, sharey=ax4) +# ax6 = fig.add_subplot(gs[1, 2], sharex=ax1, sharey=ax4) +# ax7 = fig.add_subplot(gs[2, 0], aspect='equal') +# ax8 = fig.add_subplot(gs[2, 1], sharex=ax7, sharey=ax7, aspect='equal') +# ax9 = fig.add_subplot(gs[2, 2], sharex=ax7, sharey=ax7, aspect='equal') + +# #original data +# ax1.plot(freq1, Amp1,'ro-', alpha=0.5, markersize=5.5, label = 'Data') +# ax2.plot(freq1, Amp2,'bo-', alpha=0.5, markersize=5.5, label = 'Data') +# ax3.plot(freq1, Amp3,'go-', alpha=0.5, markersize=5.5, label = 'Data') +# ax4.plot(freq1, Phase1,'ro-', alpha=0.5, markersize=5.5, label = 'Data') +# ax5.plot(freq1, Phase2,'bo-', alpha=0.5, markersize=5.5, label = 'Data') +# ax6.plot(freq1, Phase3,'go-', alpha=0.5, markersize=5.5, label = 'Data') +# ax7.plot(X1,Y1,'ro-', alpha=0.5, markersize=5.5, label = 'Data') +# ax8.plot(X2,Y2,'bo-', alpha=0.5, markersize=5.5, label = 'Data') +# ax9.plot(X3,Y3,'go-', alpha=0.5, markersize=5.5, label = 'Data') + +# #fitted curves +# ax1.plot(freq, Amp1_fitted,'c-', label='Fit', lw=2.5) +# ax2.plot(freq, Amp2_fitted,'r-', label='Fit', lw=2.5) +# ax3.plot(freq, Amp3_fitted,'m-', label='Fit', lw=2.5) +# ax4.plot(freq, Phase1_fitted,'c-', label='Fit', lw=2.5) +# ax5.plot(freq, Phase2_fitted,'r-', label='Fit', lw=2.5) +# ax6.plot(freq, Phase3_fitted,'m-', label='Fit', lw=2.5) +# ax7.plot(X1_fitted, Y1_fitted, 'c-', label='Fit', lw=2.5) +# ax8.plot(X2_fitted, Y2_fitted, 'r-', label='Fit', lw=2.5) +# ax9.plot(X3_fitted, Y3_fitted, 'm-', label='Fit', lw=2.5) + +# #inital guess curves +# ax1.plot(freq, Amp1_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax2.plot(freq, Amp2_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax3.plot(freq, Amp3_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax4.plot(freq, Phase1_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax5.plot(freq, Phase2_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax6.plot(freq, Phase3_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax7.plot(X1_guess, Y1_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax8.plot(X2_guess, Y2_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax9.plot(X3_guess, Y3_guess, color='#4682B4', linestyle='dashed', label='Guess') + + +# #Graph parts +# fig.suptitle('Trimer Resonator: Amplitude and Phase', fontsize=32) +# ax1.set_title('Mass 1', fontsize=26) +# ax2.set_title('Mass 2', fontsize=26) +# ax3.set_title('Mass 3', fontsize=26) +# ax1.set_ylabel('Amplitude', fontsize=26) +# 
ax4.set_ylabel('Phase', fontsize=26) +# ax7.set_ylabel('Imaginary', fontsize=26) + +# ax1.label_outer() +# ax2.label_outer() +# ax3.label_outer() +# ax5.tick_params(labelleft=False) +# ax6.tick_params(labelleft=False) +# ax7.label_outer() +# ax8.label_outer() +# ax9.label_outer() + +# ax4.set_xlabel('Frequency', fontsize=26) +# ax5.set_xlabel('Frequency', fontsize=26) +# ax6.set_xlabel('Frequency', fontsize=26) +# ax7.set_xlabel('Real', fontsize=26) +# ax8.set_xlabel('Real', fontsize=26) +# ax9.set_xlabel('Real', fontsize=26) + +# ax1.legend(fontsize=20) +# ax2.legend(fontsize=20) +# ax3.legend(fontsize=20) +# # ax4.legend(fontsize=20) +# # ax5.legend(fontsize=20) +# # ax6.legend(fontsize=20, loc = 'upper right') +# # ax7.legend(fontsize=20, bbox_to_anchor=(1, 1)) +# # ax8.legend(fontsize=20, bbox_to_anchor=(1, 1)) +# # ax9.legend(fontsize=20, bbox_to_anchor=(1, 1)) + +# axes = [ax1, ax2, ax3, ax4, ax5, ax6, ax7, ax8, ax9] +# for ax in axes: +# ax.tick_params(axis='both', labelsize=18) # Change tick font size + +# plt.show() + +# # Begin graphing for X and Y +# fig = plt.figure(figsize=(16,11)) +# gs = fig.add_gridspec(3, 3, hspace=0.5, wspace=0.05) + +# ax1 = fig.add_subplot(gs[0, 0]) +# ax2 = fig.add_subplot(gs[0, 1], sharex=ax1, sharey=ax1) +# ax3 = fig.add_subplot(gs[0, 2], sharex=ax1, sharey=ax1) +# ax4 = fig.add_subplot(gs[1, 0], sharex=ax1) +# ax5 = fig.add_subplot(gs[1, 1], sharex=ax1, sharey=ax4) +# ax6 = fig.add_subplot(gs[1, 2], sharex=ax1, sharey=ax4) +# ax7 = fig.add_subplot(gs[2, 0], aspect='equal') +# ax8 = fig.add_subplot(gs[2, 1], sharex=ax7, sharey=ax7, aspect='equal') +# ax9 = fig.add_subplot(gs[2, 2], sharex=ax7, sharey=ax7, aspect='equal') + +# #original data +# ax1.plot(freq1, X1,'ro-', alpha=0.5, markersize=5.5, label = 'Data') +# ax2.plot(freq1, X2,'bo-', alpha=0.5, markersize=5.5, label = 'Data') +# ax3.plot(freq1, X3,'go-', alpha=0.5, markersize=5.5, label = 'Data') +# ax4.plot(freq1, Y1,'ro-', alpha=0.5, markersize=5.5, label = 'Data') +# ax5.plot(freq1, Y2,'bo-', alpha=0.5, markersize=5.5, label = 'Data') +# ax6.plot(freq1, Y3,'go-', alpha=0.5, markersize=5.5, label = 'Data') +# ax7.plot(X1,Y1,'ro-', alpha=0.5, markersize=5.5, label = 'Data') +# ax8.plot(X2,Y2,'bo-', alpha=0.5, markersize=5.5, label = 'Data') +# ax9.plot(X3,Y3,'go-', alpha=0.5, markersize=5.5, label = 'Data') + +# #fitted curves +# ax1.plot(freq, X1_fitted,'c-', label='Fit', lw=2.5) +# ax2.plot(freq, X2_fitted,'r-', label='Fit', lw=2.5) +# ax3.plot(freq, X3_fitted,'m-', label='Fit', lw=2.5) +# ax4.plot(freq, Y1_fitted,'c-', label='Fit', lw=2.5) +# ax5.plot(freq, Y2_fitted,'r-', label='Fit', lw=2.5) +# ax6.plot(freq, Y3_fitted,'m-', label='Fit', lw=2.5) +# ax7.plot(X1_fitted, Y1_fitted, 'c-', label='Fit', lw=2.5) +# ax8.plot(X2_fitted, Y2_fitted, 'r-', label='Fit', lw=2.5) +# ax9.plot(X3_fitted, Y3_fitted, 'm-', label='Fit', lw=2.5) + +# #inital guess curves +# ax1.plot(freq, X1_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax2.plot(freq, X2_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax3.plot(freq, X3_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax4.plot(freq, Y1_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax5.plot(freq, Y2_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax6.plot(freq, Y3_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax7.plot(X1_guess, Y1_guess, color='#4682B4', linestyle='dashed', label='Guess') +# ax8.plot(X2_guess, Y2_guess, color='#4682B4', linestyle='dashed', label='Guess') +# 
ax9.plot(X3_guess, Y3_guess, color='#4682B4', linestyle='dashed', label='Guess') + +# #Graph parts +# fig.suptitle('Trimer Resonator: Real and Imaginary', fontsize=24) +# ax1.set_title('Mass 1', fontsize=26) +# ax2.set_title('Mass 2', fontsize=26) +# ax3.set_title('Mass 3', fontsize=26) +# ax1.set_ylabel('Real', fontsize=26) +# ax4.set_ylabel('Imaginary', fontsize=26) +# ax7.set_ylabel('Imaginary', fontsize=26) + +# ax1.label_outer() +# ax2.label_outer() +# ax3.label_outer() +# ax5.tick_params(labelleft=False) +# ax6.tick_params(labelleft=False) +# ax7.label_outer() +# ax8.label_outer() +# ax9.label_outer() + +# ax4.set_xlabel('Frequency', fontsize=26) +# ax5.set_xlabel('Frequency', fontsize=26) +# ax6.set_xlabel('Frequency', fontsize=26) +# ax7.set_xlabel('Real', fontsize=26) +# ax8.set_xlabel('Real', fontsize=26) +# ax9.set_xlabel('Real', fontsize=26) + +# ax1.legend(fontsize=20) +# ax2.legend(fontsize=20) +# ax3.legend(fontsize=20) +# # ax4.legend(fontsize=13) +# # ax5.legend(fontsize=13) +# # ax6.legend(fontsize=13) +# # ax7.legend(fontsize=13, loc='upper left', bbox_to_anchor=(1, 1)) +# # ax8.legend(fontsize=13, loc='upper left', bbox_to_anchor=(1, 1)) +# # ax9.legend(fontsize=13, loc='upper left', bbox_to_anchor=(1, 1)) + +# axes = [ax1, ax2, ax3, ax4, ax5, ax6, ax7, ax8, ax9] +# for ax in axes: +# ax.tick_params(axis='both', labelsize=18) # Change tick font size + + +# plt.show() + + + + + + diff --git a/trimer/Curve Fit Testing/.DS_Store b/trimer/Curve Fit Testing/.DS_Store new file mode 100644 index 0000000..22d1fe4 Binary files /dev/null and b/trimer/Curve Fit Testing/.DS_Store differ diff --git a/trimer/Curve Fit Testing/Changing One Param - Curve Fit/.DS_Store b/trimer/Curve Fit Testing/Changing One Param - Curve Fit/.DS_Store new file mode 100644 index 0000000..ba579d4 Binary files /dev/null and b/trimer/Curve Fit Testing/Changing One Param - Curve Fit/.DS_Store differ diff --git a/trimer/Curve Fit Testing/Changing One Param - Curve Fit/Changing_k1_M2-Amplitude.xlsx b/trimer/Curve Fit Testing/Changing One Param - Curve Fit/Changing_k1_M2-Amplitude.xlsx new file mode 100644 index 0000000..3423a96 Binary files /dev/null and b/trimer/Curve Fit Testing/Changing One Param - Curve Fit/Changing_k1_M2-Amplitude.xlsx differ diff --git a/trimer/Curve Fit Testing/Changing One Param - Curve Fit/Mass 2 plots - amp/.DS_Store b/trimer/Curve Fit Testing/Changing One Param - Curve Fit/Mass 2 plots - amp/.DS_Store new file mode 100644 index 0000000..9b57c47 Binary files /dev/null and b/trimer/Curve Fit Testing/Changing One Param - Curve Fit/Mass 2 plots - amp/.DS_Store differ diff --git a/trimer/Curve Fit Testing/Generating Random Params - Curve Fit/.DS_Store b/trimer/Curve Fit Testing/Generating Random Params - Curve Fit/.DS_Store new file mode 100644 index 0000000..d946f93 Binary files /dev/null and b/trimer/Curve Fit Testing/Generating Random Params - Curve Fit/.DS_Store differ diff --git a/trimer/Curve Fit Testing/Generating Random Params - Curve Fit/Generating_Random_Params_Imaginary_Part.xlsx b/trimer/Curve Fit Testing/Generating Random Params - Curve Fit/Generating_Random_Params_Imaginary_Part.xlsx new file mode 100644 index 0000000..26db460 Binary files /dev/null and b/trimer/Curve Fit Testing/Generating Random Params - Curve Fit/Generating_Random_Params_Imaginary_Part.xlsx differ diff --git a/trimer/Curve Fit Testing/Generating Random Params - Curve Fit/Generating_Random_Params_Phase.xlsx b/trimer/Curve Fit Testing/Generating Random Params - Curve 
Fit/Generating_Random_Params_Phase.xlsx new file mode 100644 index 0000000..5aaafbd Binary files /dev/null and b/trimer/Curve Fit Testing/Generating Random Params - Curve Fit/Generating_Random_Params_Phase.xlsx differ diff --git a/trimer/Curve Fit Testing/Generating Random Params - Curve Fit/Generating_Random_Params_Real_Part.xlsx b/trimer/Curve Fit Testing/Generating Random Params - Curve Fit/Generating_Random_Params_Real_Part.xlsx new file mode 100644 index 0000000..10d27ba Binary files /dev/null and b/trimer/Curve Fit Testing/Generating Random Params - Curve Fit/Generating_Random_Params_Real_Part.xlsx differ diff --git a/trimer/Curve Fit Testing/Imaginary_vs_freq_random.py b/trimer/Curve Fit Testing/Imaginary_vs_freq_random.py new file mode 100644 index 0000000..5cf85ad --- /dev/null +++ b/trimer/Curve Fit Testing/Imaginary_vs_freq_random.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Thu Jul 11 09:49:54 2024 + +@author: lydiabullock +""" + +import numpy as np +import matplotlib.pyplot as plt +from lmfit import Model +import random +import pandas as pd +from Trimer_simulator import im1, im2, im3 + +#list to store the dataframe for each of the three masses in the trimer system +all_data = [] + +#Run the following for the first, second, and third masses +for mass in ['Mass 1', 'Mass 2', 'Mass 3']: + + #type of function to fit for all three amplitude curves + #t = phase + def imaginary_function(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3): + if mass == 'Mass 1': + return im1(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + elif mass == 'Mass 2': + return im2(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + else: + return im3(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + + #create data for graph + freq = np.linspace(0.001, 5, 100) + im_part = imaginary_function(freq, 3, 3, 3, 0, 2, 2, 2, 1, 5, 5, 5) + + #create dictionary for storing data + data = {'m1_guess': [], 'm2_guess': [], 'm3_guess': [], + 'b1_guess': [], 'b2_guess': [], 'b3_guess': [], + 'k1_guess': [], 'k2_guess': [], 'k3_guess': [], 'k4_guess': [], + 'F_guess': [], + 'm1_recovered': [], 'm2_recovered': [], 'm3_recovered': [], + 'b1_recovered': [], 'b2_recovered': [], 'b3_recovered': [], + 'k1_recovered': [], 'k2_recovered': [], 'k3_recovered': [], 'k4_recovered': [], + 'F_recovered': [], + 'r_squared': []} + + #correct parameters + parameters = [3, 3, 3, 0, 2, 2, 2, 1, 5, 5, 5] + + #define number of times we generate random guesses and get the data + num = 50 + + #create funtion that generates a random number in intervals of 0.5 + def random_num(start, end, interval): + num = random.uniform(start, end) + rounded_num = round(num / interval) * interval # Round the number to the nearest interval + return rounded_num + + + #generate random initial guess, curve fit, and calculate R^2 + for trial in range(num): + try: + #create the random guess that's within 2 units of correct parameters + random_initial_guess = [param + random_num(-2,3, 0.5) for param in parameters] + #add guesses to dictionary + data['k1_guess'].append(random_initial_guess[0]) + data['k2_guess'].append(random_initial_guess[1]) + data['k3_guess'].append(random_initial_guess[2]) + data['k4_guess'].append(random_initial_guess[3]) + data['b1_guess'].append(random_initial_guess[4]) + data['b2_guess'].append(random_initial_guess[5]) + data['b3_guess'].append(random_initial_guess[6]) + data['F_guess'].append(random_initial_guess[7]) + data['m1_guess'].append(random_initial_guess[8]) + data['m2_guess'].append(random_initial_guess[9]) 
+ data['m3_guess'].append(random_initial_guess[10]) + + #curve fitting + model = Model(imaginary_function) + params = model.make_params(k1=random_initial_guess[0],k2=random_initial_guess[1],k3=random_initial_guess[2], + k4=random_initial_guess[3],b1=random_initial_guess[4],b2=random_initial_guess[5], + b3=random_initial_guess[6],F=random_initial_guess[7],m1=random_initial_guess[8], + m2=random_initial_guess[9],m3=random_initial_guess[10]) + result = model.fit(im_part, params, w=freq) + + #add recovered parameters to dictionary + for param_name in ['m1', 'm2', 'm3', 'b1', 'b2', 'b3', 'k1', 'k2', 'k3', 'k4', 'F']: + param_value = result.params[param_name].value + data[f'{param_name}_recovered'].append(param_value) + + + # Extracting the R-squared value + r_squared = 1 - result.residual.var() / np.var(im_part) + #add r_squared to dictionary + data['r_squared'].append(r_squared) + + #Graph! + #graph original data + plt.figure(figsize=(8,6)) + plt.plot(freq, im_part, 'bo', label='Original Data') + + #generate points for fitted curve + freq_fit = np.linspace(min(freq),max(freq), 500) #more w-values than before + im_fit = result.model.func(freq_fit, **result.best_values) + + #graph fitted curve + plt.plot(freq_fit, im_fit, '-', label='Fitted Curve') + + #graph parts + plt.legend(loc='best') + plt.xlabel('Frequency (Hz)') + plt.ylabel('Imaginary Part') + plt.title(f'Trimer Curve Fitting for {mass} - #{trial+1}') + + plt.savefig(f'/Users/Student/Desktop/Summer Research 2024/GitHub/NetMAP/Generating Random Params - Curve Fit/{mass} plots - imaginary part/plot_{trial+1}.png') + plt.show() + + except RuntimeError: + #If it takes too long, appends 0s to the dictionary + #R^2 = 0 means parameters not recovered + #Sam did this - I've run it a couple times and I haven't had any 0s yet + data['k1_guess'].append(0) + data['k2_guess'].append(0) + data['k3_guess'].append(0) + data['k4_guess'].append(0) + data['b1_guess'].append(0) + data['b2_guess'].append(0) + data['b3_guess'].append(0) + data['F_guess'].append(0) + data['m1_guess'].append(0) + data['m2_guess'].append(0) + data['m3_guess'].append(0) + data['k1_recovered'].append(0) + data['k2_recovered'].append(0) + data['k3_recovered'].append(0) + data['k4_recovered'].append(0) + data['b1_recovered'].append(0) + data['b2_recovered'].append(0) + data['b3_recovered'].append(0) + data['F_recovered'].append(0) + data['m1_recovered'].append(0) + data['m2_recovered'].append(0) + data['m3_recovered'].append(0) + data['r_squared'].append(0) + + #put all this data in a spreadsheet! 
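    # --- Editorial sketch (hedged, not part of the patch): the lmfit workflow these scripts share,
    # reduced to its core. The names toy_model, xdata, and ydata are illustrative only; the real
    # scripts fit im1/im2/im3 (or re*, t*, c*) from Trimer_simulator with eleven parameters.
    #
    #     import numpy as np
    #     from lmfit import Model
    #
    #     def toy_model(w, a, b):                  # first argument is the independent variable
    #         return a * w + b
    #
    #     xdata = np.linspace(0.0, 1.0, 20)
    #     ydata = toy_model(xdata, 2.0, 1.0)
    #     model = Model(toy_model)                 # lmfit reads parameter names a, b from the signature
    #     params = model.make_params(a=1.5, b=0.5) # initial guesses
    #     result = model.fit(ydata, params, w=xdata)
    #     a_fit = result.params['a'].value         # recovered parameter
    #     r_squared = 1 - result.residual.var() / np.var(ydata)   # same R^2 estimate used above
    #
    # Continuing below: guesses, recovered values, and R^2 for each mass go into a DataFrame and
    # then into one sheet per mass of the spreadsheet.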
+ df = pd.DataFrame(data) + all_data.append(df) + +#write each DataFrame to a specific sheet +file_path = '/Users/Student/Desktop/Summer Research 2024/GitHub/NetMAP/Generating Random Params - Curve Fit/Generating_Random_Params_Imaginary_Part.xlsx' +with pd.ExcelWriter(file_path, engine='xlsxwriter') as writer: + all_data[0].to_excel(writer, sheet_name='M1', index=False) + all_data[1].to_excel(writer, sheet_name='M2', index=False) + all_data[2].to_excel(writer, sheet_name='M3', index=False) + + + + + + + + + + + + diff --git a/trimer/Curve Fit Testing/Phase_vs_freq_random.py b/trimer/Curve Fit Testing/Phase_vs_freq_random.py new file mode 100644 index 0000000..a518432 --- /dev/null +++ b/trimer/Curve Fit Testing/Phase_vs_freq_random.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Wed Jul 10 10:43:54 2024 + +@author: lydiabullock +""" + +import numpy as np +import matplotlib.pyplot as plt +from lmfit import Model +import random +import pandas as pd +from Trimer_simulator import t1, t2, t3 + +#A list to store the dataframe for each of the three masses in the trimer system +all_data = [] + +#Run the following for the first, second, and third masses +for mass in ['Mass 1', 'Mass 2', 'Mass 3']: + + #type of function to fit for all three amplitude curves + #t = phase + def phase_function(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3): + if mass == 'Mass 1': + return t1(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + elif mass == 'Mass 2': + return t2(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + else: + return t3(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + + #create data for graph + freq = np.linspace(0.001, 5, 100) + phase = phase_function(freq, 3, 3, 3, 0, 2, 2, 2, 1, 5, 5, 5) + + #create dictionary for storing data + data = {'m1_guess': [], 'm2_guess': [], 'm3_guess': [], + 'b1_guess': [], 'b2_guess': [], 'b3_guess': [], + 'k1_guess': [], 'k2_guess': [], 'k3_guess': [], 'k4_guess': [], + 'F_guess': [], + 'm1_recovered': [], 'm2_recovered': [], 'm3_recovered': [], + 'b1_recovered': [], 'b2_recovered': [], 'b3_recovered': [], + 'k1_recovered': [], 'k2_recovered': [], 'k3_recovered': [], 'k4_recovered': [], + 'F_recovered': [], + 'r_squared': []} + + #correct parameters + parameters = [3, 3, 3, 0, 2, 2, 2, 1, 5, 5, 5] + + #define number of times we generate random guesses and get the data + num = 50 + + #create funtion that generates a random number in intervals of 0.5 + def random_num(start, end, interval): + num = random.uniform(start, end) + rounded_num = round(num / interval) * interval # Round the number to the nearest interval + return rounded_num + + + #generate random initial guess, curve fit, and calculate R^2 + for trial in range(num): + try: + #create the random guess that's within 2 units of correct parameters + random_initial_guess = [param + random_num(-2,3, 0.5) for param in parameters] + #add guesses to dictionary + data['k1_guess'].append(random_initial_guess[0]) + data['k2_guess'].append(random_initial_guess[1]) + data['k3_guess'].append(random_initial_guess[2]) + data['k4_guess'].append(random_initial_guess[3]) + data['b1_guess'].append(random_initial_guess[4]) + data['b2_guess'].append(random_initial_guess[5]) + data['b3_guess'].append(random_initial_guess[6]) + data['F_guess'].append(random_initial_guess[7]) + data['m1_guess'].append(random_initial_guess[8]) + data['m2_guess'].append(random_initial_guess[9]) + data['m3_guess'].append(random_initial_guess[10]) + + #curve fitting + model = Model(phase_function) + params = 
model.make_params(k1=random_initial_guess[0],k2=random_initial_guess[1],k3=random_initial_guess[2], + k4=random_initial_guess[3],b1=random_initial_guess[4],b2=random_initial_guess[5], + b3=random_initial_guess[6],F=random_initial_guess[7],m1=random_initial_guess[8], + m2=random_initial_guess[9],m3=random_initial_guess[10]) + result = model.fit(phase, params, w=freq) + + #add recovered parameters to dictionary + fit_params = result.params + + for param_name in ['m1', 'm2', 'm3', 'b1', 'b2', 'b3', 'k1', 'k2', 'k3', 'k4', 'F']: + param_value = result.params[param_name].value + data[f'{param_name}_recovered'].append(param_value) + + + # Extracting the R-squared value + r_squared = 1 - result.residual.var() / np.var(phase) + #add r_squared to dictionary + data['r_squared'].append(r_squared) + + #Graph! + #graph original data + plt.figure(figsize=(8,6)) + plt.plot(freq, phase, 'bo', label='Original Data') + + #generate points for fitted curve + freq_fit = np.linspace(min(freq),max(freq), 500) #more w-values than before + phase_fit = result.model.func(freq_fit, **result.best_values) + + #graph fitted curve + plt.plot(freq_fit, phase_fit, '-', label='Fitted Curve') + + #graph parts + plt.legend(loc='best') + plt.xlabel('Frequency (Hz)') + plt.ylabel('Phase (Rad)') + plt.title(f'Trimer Curve Fitting for {mass} - #{trial+1}') + + plt.savefig(f'/Users/Student/Desktop/Summer Research 2024/GitHub/NetMAP/Generating Random Params - Curve Fit/{mass} plots - phase/plot_{trial+1}.png') + plt.show() + + except RuntimeError: + #If it takes too long, appends 0s to the dictionary + #R^2 = 0 means parameters not recovered + #Sam did this - I've run it a couple times and I haven't had any 0s yet + data['k1_guess'].append(0) + data['k2_guess'].append(0) + data['k3_guess'].append(0) + data['k4_guess'].append(0) + data['b1_guess'].append(0) + data['b2_guess'].append(0) + data['b3_guess'].append(0) + data['F_guess'].append(0) + data['m1_guess'].append(0) + data['m2_guess'].append(0) + data['m3_guess'].append(0) + data['k1_recovered'].append(0) + data['k2_recovered'].append(0) + data['k3_recovered'].append(0) + data['k4_recovered'].append(0) + data['b1_recovered'].append(0) + data['b2_recovered'].append(0) + data['b3_recovered'].append(0) + data['F_recovered'].append(0) + data['m1_recovered'].append(0) + data['m2_recovered'].append(0) + data['m3_recovered'].append(0) + data['r_squared'].append(0) + + #put all this data in a spreadsheet! 
+ df = pd.DataFrame(data) + all_data.append(df) + +file_path = '/Users/Student/Desktop/Summer Research 2024/GitHub/NetMAP/Generating Random Params - Curve Fit/Generating_Random_Params_Phase.xlsx' +#write each DataFrame to a specific sheet +with pd.ExcelWriter(file_path, engine='xlsxwriter') as writer: + all_data[0].to_excel(writer, sheet_name='M1', index=False) + all_data[1].to_excel(writer, sheet_name='M2', index=False) + all_data[2].to_excel(writer, sheet_name='M3', index=False) + + + + + + + + + + + + diff --git a/trimer/Curve Fit Testing/Real_vs_freq_random.py b/trimer/Curve Fit Testing/Real_vs_freq_random.py new file mode 100644 index 0000000..b6398d3 --- /dev/null +++ b/trimer/Curve Fit Testing/Real_vs_freq_random.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Thu Jul 11 09:35:43 2024 + +@author: lydiabullock +""" + +import numpy as np +import matplotlib.pyplot as plt +from lmfit import Model +import random +import pandas as pd +from Trimer_simulator import re1, re2, re3 + +#A list to store the dataframe for each of the three masses in the trimer system +all_data = [] + +#Run the following for the first, second, and third masses +for mass in ['Mass 1', 'Mass 2', 'Mass 3']: + + #type of function to fit for all three amplitude curves + #t = phase + def real_function(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3): + if mass == 'Mass 1': + return re1(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + elif mass == 'Mass 2': + return re2(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + else: + return re3(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + + #create data for graph + freq = np.linspace(0.001, 5, 100) + real_part = real_function(freq, 3, 3, 3, 0, 2, 2, 2, 1, 5, 5, 5) + + #create dictionary for storing data + data = {'m1_guess': [], 'm2_guess': [], 'm3_guess': [], + 'b1_guess': [], 'b2_guess': [], 'b3_guess': [], + 'k1_guess': [], 'k2_guess': [], 'k3_guess': [], 'k4_guess': [], + 'F_guess': [], + 'm1_recovered': [], 'm2_recovered': [], 'm3_recovered': [], + 'b1_recovered': [], 'b2_recovered': [], 'b3_recovered': [], + 'k1_recovered': [], 'k2_recovered': [], 'k3_recovered': [], 'k4_recovered': [], + 'F_recovered': [], + 'r_squared': []} + + #correct parameters + parameters = [3, 3, 3, 0, 2, 2, 2, 1, 5, 5, 5] + + #define number of times we generate random guesses and get the data + num = 50 + + #create funtion that generates a random number in intervals of 0.5 + def random_num(start, end, interval): + num = random.uniform(start, end) + rounded_num = round(num / interval) * interval # Round the number to the nearest interval + return rounded_num + + #generate random initial guess, curve fit, and calculate R^2 + for trial in range(num): + try: + #create the random guess that's within 2 units of correct parameters + random_initial_guess = [param + random_num(-2,3, 0.5) for param in parameters] + #add guesses to dictionary + data['k1_guess'].append(random_initial_guess[0]) + data['k2_guess'].append(random_initial_guess[1]) + data['k3_guess'].append(random_initial_guess[2]) + data['k4_guess'].append(random_initial_guess[3]) + data['b1_guess'].append(random_initial_guess[4]) + data['b2_guess'].append(random_initial_guess[5]) + data['b3_guess'].append(random_initial_guess[6]) + data['F_guess'].append(random_initial_guess[7]) + data['m1_guess'].append(random_initial_guess[8]) + data['m2_guess'].append(random_initial_guess[9]) + data['m3_guess'].append(random_initial_guess[10]) + + #curve fitting + model = Model(real_function) + params = 
model.make_params(k1=random_initial_guess[0],k2=random_initial_guess[1],k3=random_initial_guess[2], + k4=random_initial_guess[3],b1=random_initial_guess[4],b2=random_initial_guess[5], + b3=random_initial_guess[6],F=random_initial_guess[7],m1=random_initial_guess[8], + m2=random_initial_guess[9],m3=random_initial_guess[10]) + result = model.fit(real_part, params, w=freq) + + #add recovered parameters to dictionary + fit_params = result.params + + for param_name in ['m1', 'm2', 'm3', 'b1', 'b2', 'b3', 'k1', 'k2', 'k3', 'k4', 'F']: + param_value = result.params[param_name].value + data[f'{param_name}_recovered'].append(param_value) + + + # Extracting the R-squared value + r_squared = 1 - result.residual.var() / np.var(real_part) + #add r_squared to dictionary + data['r_squared'].append(r_squared) + + #Graph! + #graph original data + plt.figure(figsize=(8,6)) + plt.plot(freq, real_part, 'bo', label='Original Data') + + #generate points for fitted curve + freq_fit = np.linspace(min(freq),max(freq), 500) #more w-values than before + real_part_fit = result.model.func(freq_fit, **result.best_values) + + #graph fitted curve + plt.plot(freq_fit, real_part_fit, '-', label='Fitted Curve') + + #graph parts + plt.legend(loc='best') + plt.xlabel('Frequency (Hz)') + plt.ylabel('Real Part') + plt.title(f'Trimer Curve Fitting for {mass} - #{trial+1}') + + plt.savefig(f'/Users/Student/Desktop/Summer Research 2024/GitHub/NetMAP/Generating Random Params - Curve Fit/{mass} plots - real part/plot_{trial+1}.png') + plt.show() + + except RuntimeError: + #If it takes too long, appends 0s to the dictionary + #R^2 = 0 means parameters not recovered + #Sam did this - I've run it a couple times and I haven't had any 0s yet + data['k1_guess'].append(0) + data['k2_guess'].append(0) + data['k3_guess'].append(0) + data['k4_guess'].append(0) + data['b1_guess'].append(0) + data['b2_guess'].append(0) + data['b3_guess'].append(0) + data['F_guess'].append(0) + data['m1_guess'].append(0) + data['m2_guess'].append(0) + data['m3_guess'].append(0) + data['k1_recovered'].append(0) + data['k2_recovered'].append(0) + data['k3_recovered'].append(0) + data['k4_recovered'].append(0) + data['b1_recovered'].append(0) + data['b2_recovered'].append(0) + data['b3_recovered'].append(0) + data['F_recovered'].append(0) + data['m1_recovered'].append(0) + data['m2_recovered'].append(0) + data['m3_recovered'].append(0) + data['r_squared'].append(0) + + #put all this data in a spreadsheet! + df = pd.DataFrame(data) + all_data.append(df) + +file_path = '/Users/Student/Desktop/Summer Research 2024/GitHub/NetMAP/Generating Random Params - Curve Fit/Generating_Random_Params_Real_Part.xlsx' +#write each DataFrame to a specific sheet +with pd.ExcelWriter(file_path, engine='xlsxwriter') as writer: + all_data[0].to_excel(writer, sheet_name='M1', index=False) + all_data[1].to_excel(writer, sheet_name='M2', index=False) + all_data[2].to_excel(writer, sheet_name='M3', index=False) + + + + + + + + + + + + diff --git a/trimer/Curve Fit Testing/Trimer_simulator.py b/trimer/Curve Fit Testing/Trimer_simulator.py new file mode 100644 index 0000000..9cc633d --- /dev/null +++ b/trimer/Curve Fit Testing/Trimer_simulator.py @@ -0,0 +1,316 @@ +# -*- coding: utf-8 -*- +""" +Spyder Editor + +This is a temporary script file. 
+""" + +''' Create code that simulates spectrum response for trimer + See if we can recover the parameters + Does NOT include noise ''' + +import numpy as np +import sympy as sp +import matplotlib.pyplot as plt + +#Define all variables for sympy + +#individual springs that correspond to individual masses +k1 = sp.symbols('k_1', real = True) + +#springs that connect two masses +k2 = sp.symbols('k_2', real = True) +k3 = sp.symbols('k_3', real = True) +k4 = sp.symbols('k_4', real = True) + +#damping coefficients +b1 = sp.symbols('b1', real = True) +b2 = sp.symbols('b2', real = True) +b3 = sp.symbols('b3', real = True) + +#masses +m1 = sp.symbols('m1', real = True) +m2 = sp.symbols('m2', real = True) +m3 = sp.symbols('m3', real = True) + +#Driving force amplitude +F = sp.symbols('F', real = True) + +#driving frequency (leave as variable) +wd = sp.symbols(r'\omega_d', real = True) + +#Symbolically solve for driving amplitudes and phase using sympy + +### Trimer +#Matrix for complex equations of motion, Matrix . Zvec = Fvec +unknownsmatrix = sp.Matrix([[-wd**2*m1 + 1j*wd*b1 + k1 + k2, -k2, 0], + [-k2, -wd**2*m2 + 1j*wd*b2 + k2 + k3, -k3], + [0, -k3, -wd**2*m3 + 1j*wd*b3 + k3 + k4]]) +''' Lydia - I'm pretty sure he had a mistake in the unknowns matrix. There were some k4's +showing up where they weren't supposed to be (-k4 where the zeros are now and one +k4 +in the first entry) ''' + +#Matrices for Cramer's Rule: substitute force vector Fvec=[F,0] for each column in turn (m1 is driven, m2 and m3 are not) +unknownsmatrix1 = sp.Matrix([[F, -k2, 0], + [0, -wd**2*m2 + 1j*wd*b2 + k2 + k3, -k3], + [0, -k3, -wd**2*m3 + 1j*wd*b3 + k3 + k4]]) +unknownsmatrix2 = sp.Matrix([[-wd**2*m1 + 1j*wd*b1 + k1 + k2, F, 0], + [-k2, 0, -k3], + [0, 0, -wd**2*m3 + 1j*wd*b3 + k3 + k4]]) +unknownsmatrix3 = sp.Matrix([[-wd**2*m1 + 1j*wd*b1 + k1 + k2, -k3, F], + [-k2, -wd**2*m2 + 1j*wd*b2 + k2 + k3, 0], + [0, -k3, 0]]) + +#Apply Cramer's Rule to solve for Zvec +complexamp1, complexamp2, complexamp3 = (unknownsmatrix1.det()/unknownsmatrix.det(), + unknownsmatrix2.det()/unknownsmatrix.det(), + unknownsmatrix3.det()/unknownsmatrix.det()) + +#Solve for phases for each mass +delta1 = sp.arg(complexamp1) # Returns the argument (phase angle in radians) of a complex number. +delta2 = sp.arg(complexamp2) # sp.re(complexamp2)/sp.cos(delta2) (this is the same thing) +delta3 = sp.arg(complexamp3) + + +### What if we apply the same force to all three masses of dimer? +#Matrices for Cramer's Rule: substitute force vector Fvec=[F,0] for each column in turn (m1 is driven, m2 is not) +unknownsmatrix1FFF = sp.Matrix([[F, -k2, 0], + [F, -wd**2*m2 + 1j*wd*b2 + k2 + k3, -k3], + [F, -k3, -wd**2*m3 + 1j*wd*b3 + k3 + k4]]) +unknownsmatrix2FFF = sp.Matrix([[-wd**2*m1 + 1j*wd*b1 + k1 + k2, F, 0], + [-k2, F, -k3], + [0, F, -wd**2*m3 + 1j*wd*b3 + k3 + k4]]) +unknownsmatrix3FFF = sp.Matrix([[-wd**2*m1 + 1j*wd*b1 + k1 + k2, -k2, F], + [-k2, -wd**2*m2 + 1j*wd*b2 + k2 + k3,F], + [0, -k3, F]]) +#Apply Cramer's Rule to solve for Zvec +complexamp1FFF, complexamp2FFF, complexamp3FFF = (unknownsmatrix1FFF.det()/unknownsmatrix.det(), + unknownsmatrix2FFF.det()/unknownsmatrix.det(), + unknownsmatrix3FFF.det()/unknownsmatrix.det()) +#Solve for phases for each mass +delta1FFF = sp.arg(complexamp1FFF) # Returns the argument (phase angle in radians) of a complex number. 
+delta2FFF = sp.arg(complexamp2FFF) # sp.re(complexamp2)/sp.cos(delta2) (this is the same thing) +delta3FFF = sp.arg(complexamp3FFF) + +### Ampolitude and phase +#Wrap phases for plots + +wrap1 = (delta1)%(2*sp.pi) +wrap2 = (delta2)%(2*sp.pi) +wrap3 = (delta3)%(2*sp.pi) +wrap1FFF = (delta1FFF)%(2*sp.pi) +wrap2FFF = (delta2FFF)%(2*sp.pi) +wrap3FFF = (delta3FFF)%(2*sp.pi) + +#Solve for amplitude coefficients (real amplitude A - not complex) +amp1 = sp.Abs(complexamp1) +amp2 = sp.Abs(complexamp2) +amp3 = sp.Abs(complexamp3) +amp1FFF = sp.Abs(complexamp1FFF) +amp2FFF = sp.Abs(complexamp2FFF) +amp3FFF = sp.Abs(complexamp3FFF) + +#lambdify curves using sympy +#c = amplitude (not complex), t = phase +#re and im are the real and imaginary parts of complex number + +c1 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp1) +t1 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap1) + +c2 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp2) +t2 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap2) + +c3 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp3) +t3 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap3) + +re1 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp1)) +im1 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp1)) +re2 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp2)) +im2 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp2)) +re3 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp3)) +im3 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp3)) + + +c1FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp1FFF) +t1FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap1FFF) + +c2FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp2FFF) +t2FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap2FFF) + +c3FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp3FFF) +t3FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap3FFF) + +re1FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp1FFF)) +im1FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp1FFF)) +re2FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp2FFF)) +im2FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp2FFF)) +re3FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp3FFF)) +im3FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp3FFF)) + +#define functions + +#curve = (real) amplitude, theta = phase, e = error (i.e. 
noise) +#realamp, imamp = real and imaginary parts of complex number + +def curve1(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return c1FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return c1(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def theta1(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return t1FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + else: #force just m1 + return t1(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + +def curve2(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return c2FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return c2(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def theta2(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return t2FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + else: #force just m1 + return t2(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + +def curve3(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return c3FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return c3(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def theta3(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return t3FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + else: #force just m1 + return t3(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + +def realamp1(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return re1FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return re1(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def imamp1(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return im1FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + else: #force just m1 + return im1(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + +def realamp2(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return re2FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return re2(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def imamp2(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return im2FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + else: #force just m1 + return im2(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + +def realamp3(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, 
m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return re3FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return re3(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def imamp3(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return im3FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + else: #force just m1 + return im3(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + + +''' Let's create some graphs ''' + +#Amplitude and phase vs frequency +freq = np.linspace(.01,5,500) +amps1 = curve1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False) +phase1 = theta1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False) +fig, ax1 = plt.subplots() +ax1.plot(freq, amps1,'r-', label='Amplitude') +ax1.set_xlabel('Frequency') +ax1.set_ylabel('Amplitude') +ax2 = ax1.twinx() +ax2.plot(freq, phase1,'b-', label='Phase') +ax2.set_ylabel('Phase') +ax1.legend(loc='upper right') +ax2.legend(loc='center right') + +# #Z_1 - complex plane +# realpart1 = realamp1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False) +# impart1 = imamp1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False) +# plt.plot(realpart1, impart1, 'go', linestyle='dashed') +# plt.xlabel('Re(Z)') +# plt.ylabel('Im(Z)') +# plt.title('$Z_1(w)$') + +''' Below is more efficient I think. + But the runtime for the code is still a bit long. ''' + +##Another way to graph the complex plane! Probably faster as we get more complex amps + +def complexamp(A,phi): #takes a real amplitude and phase and returns a complex number + return A * np.exp(1j*phi) + +# freq = np.linspace(.01,5,500) +# Z1 = (complexamp(curve1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False))) +# Z2 = (complexamp(curve2(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta2(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False))) +# Z3 = (complexamp(curve3(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta3(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False))) + +# Just the first complex amplitude +# plt.plot(Z1.real, Z1.imag, 'go', linestyle = 'dashed') +# plt.xlabel('Re($Z_1$)') +# plt.ylabel('Im($Z_1$)') +# plt.title('$Z_1(w)$') + + +##Another way to graph frequency vs amplitude! 
+# goes the other way around + +def amp(a,b): + return np.sqrt(a**2 + b**2) + +def A_from_Z(Z): # calculate amplitude of complex number + return amp(Z.real, Z.imag) + +# freq = np.linspace(.01,5,500) +# Z1 = (complexamp(curve1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False))) +# Z2 = (complexamp(curve2(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta2(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False))) +# Z3 = (complexamp(curve3(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta3(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False))) + +# amps1 = A_from_Z(Z1) +# plt.plot(freq, amps1, 'r-') +# plt.xlabel('Frequency') +# plt.ylabel('Amplitude)') +# plt.title('$Z_1(w)$') + + +''' Create data for Trimer NetMAP ''' + +#Complex amps at a frequency +#Can call this function in other code :) +def calculate_spectra(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all): + Z1 = (complexamp(curve1(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all), theta1(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all))) + Z2 = (complexamp(curve2(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all), theta2(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all))) + Z3 = (complexamp(curve3(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all), theta3(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all))) + + return Z1, Z2, Z3 + + diff --git a/trimer/Curve Fit Testing/Vary_one_initial_guess.py b/trimer/Curve Fit Testing/Vary_one_initial_guess.py new file mode 100644 index 0000000..5404c08 --- /dev/null +++ b/trimer/Curve Fit Testing/Vary_one_initial_guess.py @@ -0,0 +1,238 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Thu Jul 11 10:01:48 2024 + +@author: lydiabullock +""" + +import numpy as np +import matplotlib.pyplot as plt +from lmfit import Model +import pandas as pd +from Trimer_simulator import c1, c2, c3, t1, t2, t3, re1, re2, re3, im1, im2, im3 + +''' Function below varies one initial guess parameter at a time for the mass of choice (1,2,3) of a trimer system. +Curve fits for dependent variable of choice (amp, phase, real, im) versus frequency. 
+Takes a number (1,2,3) for the mass you want to analyze +Takes a string (amp, phase, real, im) for the dependent variable of your data (independent variable is frequency) +''' + +def vary_one_initial_guess(which_mass, which_graph): + + #list to store the dataframe for each time we change a different parameter + all_data = [] + + for param in ['m1', 'm2', 'm3', 'b1', 'b2', 'b3', 'k1', 'k2', 'k3', 'k4', 'F']: + + #type of function to fit for curves + #c = amplitude + #t = phase + #re = real part + #im = imaginary part + def function(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3): + if which_mass == 1: + if which_graph == 'amp': + return c1(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + elif which_graph == 'phase': + return t1(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + elif which_graph == 'real': + return re1(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + else: + return im1(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + if which_mass == 2: + if which_graph == 'amp': + return c2(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + elif which_graph == 'phase': + return t2(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + elif which_graph == 'real': + return re2(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + else: + return im2(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + if which_mass == 3: + if which_graph == 'amp': + return c3(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + elif which_graph == 'phase': + return t3(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + elif which_graph == 'real': + return re3(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + else: + return im3(w, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + + #create data for graph + freq = np.linspace(0.001, 5, 100) + dependent = function(freq, 3, 3, 3, 0, 2, 2, 2, 1, 5, 5, 5) + + #create dictionary for storing data - only varying one guess + data = {f'{param}_guess': [], + 'm1_recovered': [], 'm2_recovered': [], 'm3_recovered': [], + 'b1_recovered': [], 'b2_recovered': [], 'b3_recovered': [], + 'k1_recovered': [], 'k2_recovered': [], 'k3_recovered': [], 'k4_recovered': [], + 'F_recovered': [], + '1-r_squared': []} + + #correct parameters + parameters = {'k1':3, 'k2': 3, 'k3': 3, 'k4': 0, + 'b1': 2, 'b2': 2, 'b3': 2, 'F': 1, + 'm1': 5, 'm2': 5, 'm3': 5} + + #changes the first parameter (k1) by increments of 0.1 (starting at 0) + increments_list = [i / 10 for i in range(0,5)] + for trial in range(len(increments_list)): + altered_guess = parameters.copy() + altered_guess[param] = increments_list[trial] + data[f'{param}_guess'].append(altered_guess[param]) #taken from Sam + + try: + + #curve fitting + model = Model(function) + params = model.make_params(k1=altered_guess['k1'], k2=altered_guess['k2'], k3=altered_guess['k3'], k4=altered_guess['k4'], + b1=altered_guess['b1'], b2=altered_guess['b2'], b3=altered_guess['b3'], + F=altered_guess['F'], m1=altered_guess['m1'], m2=altered_guess['m2'], m3=altered_guess['m3'] + ) + result = model.fit(dependent, params, w=freq) + + #add recovered parameters to dictionary + for param_name in ['m1', 'm2', 'm3', 'b1', 'b2', 'b3', 'k1', 'k2', 'k3', 'k4', 'F']: + param_value = result.params[param_name].value + data[f'{param_name}_recovered'].append(param_value) + + + # Extracting the R-squared value + r_squared = 1 - result.residual.var() / np.var(dependent) + #add r_squared to dictionary + data['1-r_squared'].append(1-r_squared) + + #Graph! 
+ #graph original data + plt.figure(figsize=(8,6)) + plt.plot(freq, dependent, 'go', label='Original Data') + + #generate points for fitted curve + freq_fit = np.linspace(min(freq),max(freq), 500) #more w-values than before + dependent_fit = result.model.func(freq_fit, **result.best_values) + + #graph fitted curve + plt.plot(freq_fit, dependent_fit, '-', label='Fitted Curve') + + #graph parts + + + #setting labels for graph + if which_mass == 1: + save_fig_name = f'Mass_1_vary_{param}_plot_{trial+1}.png' + title = f'Trimer Curve Fitting for Mass 1 - Varying {param} - #{trial+1}' + if which_graph == 'amp': + ylabel = 'Amplitude (Hz)' + elif which_graph == 'phase': + ylabel = 'Phase (rad)' + elif which_graph == 'real': + ylabel = 'Real Part' + else: + ylabel = 'Imaginary Part' + if which_mass == 2: + save_fig_name = f'Mass_2_vary_{param}_plot_{trial+1}.png' + title = f'Trimer Curve Fitting for Mass 2 - Varying {param} - #{trial+1}' + if which_graph == 'amp': + ylabel = 'Amplitude (Hz)' + elif which_graph == 'phase': + ylabel = 'Phase (rad)' + elif which_graph == 'real': + ylabel = 'Real Part' + else: + ylabel = 'Imaginary Part' + if which_mass == 3: + save_fig_name = f'Mass_3_vary_{param}_plot_{trial+1}.png' + title = f'Trimer Curve Fitting for Mass 3 - Varying {param} - #{trial+1}' + if which_graph == 'amp': + ylabel = 'Amplitude (Hz)' + elif which_graph == 'phase': + ylabel = 'Phase (rad)' + elif which_graph == 'real': + ylabel = 'Real Part' + else: + ylabel = 'Imaginary Part' + + plt.legend(loc='best') + plt.xlabel('Frequency (Hz)') + plt.ylabel(ylabel) + plt.title(title) + + #file path specific to computer you are working on + plt.savefig(save_fig_name) + plt.show() + + except RuntimeError: + #If it takes too long, appends 0s to the dictionary + #R^2 = 0 means parameters not recovered + #Sam did this - I've run it a couple times and I haven't had any 0s yet + data['k1_recovered'].append(0) + data['k2_recovered'].append(0) + data['k3_recovered'].append(0) + data['k4_recovered'].append(0) + data['b1_recovered'].append(0) + data['b2_recovered'].append(0) + data['b3_recovered'].append(0) + data['F_recovered'].append(0) + data['m1_recovered'].append(0) + data['m2_recovered'].append(0) + data['m3_recovered'].append(0) + data['1-r_squared'].append(0) + + #put all this data in a spreadsheet! 
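+        # Added note: here the guess value is appended before the try block and
+        # the except branch only zero-fills the *_recovered and 1-r_squared
+        # columns, so every column keeps one entry per increment and the
+        # DataFrame below builds cleanly even when a fit fails.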
+ df = pd.DataFrame(data) + all_data.append(df) + + #file path specific to computer you are working on + file_path = 'Changing_k1_M2-Amplitude.xlsx' + + if which_mass == 1: + if which_graph == 'amp': + file_path = 'Changing_1_Guess_M1-Amplitude.xlsx' + elif which_graph == 'phase': + file_path = 'Changing_1_Guess_M1-Phase.xlsx' + elif which_graph == 'real': + file_path = 'Changing_1_Guess_M1-Real.xlsx' + else: + file_path = 'Changing_1_Guess_M1-Imaginary.xlsx' + if which_mass == 2: + if which_graph == 'amp': + file_path = 'Changing_1_Guess_M2-Amplitude.xlsx' + elif which_graph == 'phase': + file_path = 'Changing_1_Guess_M2-Phase.xlsx' + elif which_graph == 'real': + file_path = 'Changing_1_Guess_M2-Real.xlsx' + else: + file_path = 'Changing_1_Guess_M2-Imaginary.xlsx' + if which_mass == 3: + if which_graph == 'amp': + file_path = 'Changing_1_Guess_M3-Amplitude.xlsx' + elif which_graph == 'phase': + file_path = 'Changing_1_Guess_M3-Phase.xlsx' + elif which_graph == 'real': + file_path = 'Changing_1_Guess_M2-Real.xlsx' + else: + file_path = 'Changing_1_Guess_M2-Imaginary.xlsx' + + #Puts each dataframe into its own sheet on the spreadsheet + #make sure in correct order: ['m1', 'm2', 'm3', 'b1', 'b2', 'b3', 'k1', 'k2', 'k3', 'k4', 'F'] + with pd.ExcelWriter(file_path, engine='xlsxwriter') as writer: + all_data[0].to_excel(writer, sheet_name='vary m1', index=False) + all_data[1].to_excel(writer, sheet_name='vary m2', index=False) + all_data[2].to_excel(writer, sheet_name='vary m3', index=False) + all_data[3].to_excel(writer, sheet_name='vary b1', index=False) + all_data[4].to_excel(writer, sheet_name='vary b2', index=False) + all_data[5].to_excel(writer, sheet_name='vary b3', index=False) + all_data[6].to_excel(writer, sheet_name='vary k1', index=False) + all_data[7].to_excel(writer, sheet_name='vary k2', index=False) + all_data[8].to_excel(writer, sheet_name='vary k3', index=False) + all_data[9].to_excel(writer, sheet_name='vary k4', index=False) + all_data[10].to_excel(writer, sheet_name='vary F', index=False) + + print('All files saved') + + + + + diff --git a/trimer/Curvefit_compare_scale_vs_fix_F.py b/trimer/Curvefit_compare_scale_vs_fix_F.py new file mode 100644 index 0000000..8ffe986 --- /dev/null +++ b/trimer/Curvefit_compare_scale_vs_fix_F.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Wed Jul 17 14:25:50 2024 + +@author: lydiabullock +""" +from curve_fitting_amp_phase_all import multiple_fit_amp_phase +from curve_fitting_X_Y_all import multiple_fit_X_Y +import pandas as pd +import numpy as np + +def complex_noise(n, noiselevel): + global complexamplitudenoisefactor + complexamplitudenoisefactor = 0.0005 + return noiselevel* complexamplitudenoisefactor * np.random.randn(n,) + +#Make parameters/initial guesses - [k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3] +#Note that right now we only scale/fix by F, so make sure to keep F correct in guesses +true_params = [4, 3, 2, 1, 1, 2, 3, 1, 1, 1, 1] +guessed_params = [4.023, 3, 1.909, 0.80911, 1.2985, 2, 2.891, 1, 1, 1.11, 1] + +starting_row = 0 + +with pd.ExcelWriter('Curve_Fit_Simultaneously_Scale_vs_Fix.xlsx', engine='xlsxwriter') as writer: + for i in range(5): + + #Create noise + e = complex_noise(300, 2) + + #Get the data! 
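+        # Added note: the same noise realization `e` is reused for all four
+        # fits below, so the scaled-F and fixed-F variants are compared on
+        # identical noisy data within each iteration.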
+ dataframe1 = multiple_fit_amp_phase(guessed_params, true_params, e, False, False) #Scaled + dataframe2 = multiple_fit_amp_phase(guessed_params, true_params, e, False, True) #Fixed + dataframe3 = multiple_fit_X_Y(guessed_params, true_params, e, False, False) #Scaled + dataframe4 = multiple_fit_X_Y(guessed_params, true_params, e, False, True) #Fixed + + #Add to excel spreadsheet + + dataframe1.to_excel(writer, sheet_name='Amp & Phase - Scaled vs Fixed F', startrow=starting_row, index=False) + dataframe2.to_excel(writer, sheet_name='Amp & Phase - Scaled vs Fixed F', startrow=starting_row+2, index=False, header=False) + dataframe3.to_excel(writer, sheet_name='X & Y - Scaled vs Fixed F', startrow=starting_row, index=False) + dataframe4.to_excel(writer, sheet_name='X & Y - Scaled vs Fixed F', startrow=starting_row+2, index=False, header=False) + + starting_row += 4 \ No newline at end of file diff --git a/trimer/Graphing_each_iteration.py b/trimer/Graphing_each_iteration.py new file mode 100644 index 0000000..7608a48 --- /dev/null +++ b/trimer/Graphing_each_iteration.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Mon Sep 30 22:51:27 2024 + +@author: Lydia Bullock +""" + +import numpy as np +import matplotlib.pyplot as plt +from lmfit import minimize, Parameters +from Trimer_simulator import curve1, theta1, curve2, theta2, curve3, theta3, c1, t1, c2, t2, c3, t3 +from comparing_curvefit_types import complex_noise, syserr +import seaborn as sns + +def residuals(params, wd, Amp1_data, Amp2_data, Amp3_data, Phase1_data, Phase2_data, Phase3_data): + k1 = params['k1'].value + k2 = params['k2'].value + k3 = params['k3'].value + k4 = params['k4'].value + b1 = params['b1'].value + b2 = params['b2'].value + b3 = params['b3'].value + F = params['F'].value + m1 = params['m1'].value + m2 = params['m2'].value + m3 = params['m3'].value + + modelc1 = c1(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + modelc2 = c2(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + modelc3 = c3(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + modelt1 = t1(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + modelt2 = t2(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + modelt3 = t3(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + + residc1 = Amp1_data - modelc1 + residc2 = Amp2_data - modelc2 + residc3 = Amp3_data - modelc3 + residt1 = Phase1_data - modelt1 + residt2 = Phase2_data - modelt2 + residt3 = Phase3_data - modelt3 + + return np.concatenate((residc1, residc2, residc3, residt1, residt2, residt3)) + +#Callback function to plot each iteration +def plot_callback(params, iter, resid, *args, **kws): + plt.clf() + if iter % 2 == 0: + + freq = args[0] + Amp1 = args[1] + + #Recall parameters + k1 = params['k1'].value + k2 = params['k2'].value + k3 = params['k3'].value + k4 = params['k4'].value + b1 = params['b1'].value + b2 = params['b2'].value + b3 = params['b3'].value + F = params['F'].value + m1 = params['m1'].value + m2 = params['m2'].value + m3 = params['m3'].value + + #Get model data to plot + modelc1 = c1(freq, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + + # sns.reset_defaults() + # sns.set_context("talk") + # sns.scatterplot(freq, Amp1, 'bo', label='Data') + # sns.scatterplot(freq, modelc1, 'r-', label='Model') + plt.plot(freq, Amp1, 'bo', label='Data') + plt.plot(freq, modelc1, 'r-', label='Model') + plt.ylim(ymax=1.6) + plt.title(f"Trimer Resonator System - Iteration: {iter}", fontsize=18) + plt.ylabel('Amplitude (m)', fontsize=16) + plt.xlabel('Frequency (Hz)', fontsize=16) + 
plt.legend(fontsize=14) + plt.pause(0.1) + +'''Begin Work Here''' + +##Create data and system parameters +freq = np.linspace(0.001, 4, 300) + +e = complex_noise(300, 2) +force_all = False +#this is using System 10 of 15 Systems - 10 Freqs NetMAP Better Params +# params_correct = [5.385, 7.276, 5.271, 4.382, 0.984, 0.646, 0.775, 1, 3.345, 9.26, 7.439] +# params_guess = [4.6455, 7.1909, 4.9103, 3.4398, 1.0832, 0.596, 0.6245, 1, 3.4532, 8.7681, 8.7575] + +#this is using System 7 of 15 Systems - 10 Freqs NetMAP Better Params +params_correct = [1.427, 6.472, 3.945, 3.024, 0.675, 0.801, 0.191, 1, 7.665, 9.161, 7.139] +params_guess = [1.1942, 5.4801, 3.2698, 3.3004, 0.7682, 0.8185, 0.1765, 1, 7.4923, 8.9932, 8.1035] + +#[k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3] + +Amp1 = curve1(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) +Phase1 = theta1(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) \ + + 2 * np.pi +Amp2 = curve2(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) +Phase2 = theta2(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) \ + + 2 * np.pi +Amp3 = curve3(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) +Phase3 = theta3(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) \ + + 2 * np.pi + +#Create parameter guesses +params = Parameters() +params.add('k1', value = params_guess[0], min=0) +params.add('k2', value = params_guess[1], min=0) +params.add('k3', value = params_guess[2], min=0) +params.add('k4', value = params_guess[3], min=0) +params.add('b1', value = params_guess[4], min=0) +params.add('b2', value = params_guess[5], min=0) +params.add('b3', value = params_guess[6], min=0) +params.add('F', value = params_guess[7], min=0) +params.add('m1', value = params_guess[8], min=0) +params.add('m2', value = params_guess[9], min=0) +params.add('m3', value = params_guess[10], min=0) + +params['F'].vary = False + +#Perform minimization and plot each step! 
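+# Added note: minimize forwards the args tuple to both residuals and the
+# iter_cb callback, so plot_callback receives (freq, Amp1, ...) and reads
+# freq = args[0], Amp1 = args[1] to redraw the model after each iteration.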
+result = minimize(residuals, params, args = (freq, Amp1, Amp2, Amp3, Phase1, Phase2, Phase3), iter_cb=plot_callback) + +#Put information into dictionary +data = {'k1_true': [], 'k2_true': [], 'k3_true': [], 'k4_true': [], + 'b1_true': [], 'b2_true': [], 'b3_true': [], + 'm1_true': [], 'm2_true': [], 'm3_true': [], 'F_true': [], + 'k1_guess': [], 'k2_guess': [], 'k3_guess': [], 'k4_guess': [], + 'b1_guess': [], 'b2_guess': [], 'b3_guess': [], + 'm1_guess': [], 'm2_guess': [], 'm3_guess': [], 'F_guess': [], + 'k1_recovered': [], 'k2_recovered': [], 'k3_recovered': [], 'k4_recovered': [], + 'b1_recovered': [], 'b2_recovered': [], 'b3_recovered': [], + 'm1_recovered': [], 'm2_recovered': [], 'm3_recovered': [], 'F_recovered': [], + 'e_k1': [], 'e_k2': [], 'e_k3': [], 'e_k4': [], + 'e_b1': [], 'e_b2': [], 'e_b3': [], 'e_F': [], + 'e_m1': [], 'e_m2': [], 'e_m3': []} + +#Create dictionary of true parameters from list provided (need for compliting data bc I can't do it with a list) +true_params = {'k1': params_correct[0], 'k2': params_correct[1], 'k3': params_correct[2], 'k4': params_correct[3], + 'b1': params_correct[4], 'b2': params_correct[5], 'b3': params_correct[6], 'F': params_correct[7], + 'm1': params_correct[8], 'm2': params_correct[9], 'm3': params_correct[10]} + +for param_name in ['k1','k2','k3','k4','b1','b2','b3','F','m1','m2','m3']: + #Add true parameters to dictionary + param_true = true_params[param_name] + data[f'{param_name}_true'].append(param_true) + + #Add guessed parameters to dictionary + param_guess = params[param_name].value + data[f'{param_name}_guess'].append(param_guess) + + #Add fitted parameters to dictionary + param_fit = result.params[param_name].value + data[f'{param_name}_recovered'].append(param_fit) + + #Calculate systematic error and add to dictionary + systematic_error = syserr(param_fit, param_true) + data[f'e_{param_name}'].append(systematic_error) + diff --git a/Three Coupled Resonator Model.ipynb b/trimer/Three Coupled Resonator Model.ipynb similarity index 100% rename from Three Coupled Resonator Model.ipynb rename to trimer/Three Coupled Resonator Model.ipynb diff --git a/trimer/Trimer_NetMAP.py b/trimer/Trimer_NetMAP.py new file mode 100644 index 0000000..1ff143d --- /dev/null +++ b/trimer/Trimer_NetMAP.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Fri Mar 22 15:57:10 2024 + +@author: samfeldman & lydiabullock +""" +import numpy as np +from Trimer_simulator import calculate_spectra + +''' THIS IS THE NETMAP PART ''' + +def Zmatrix(freq, complexamp1, complexamp2, complexamp3, force_all): + Zmatrix = [] + for rowindex in range(len(freq)): + w = freq[rowindex] + Z1 = complexamp1[rowindex] + Z2 = complexamp2[rowindex] + Z3 = complexamp3[rowindex] + + Zmatrix.append([-w**2*np.real(Z1), 0, 0, -w*np.imag(Z1), 0, 0, np.real(Z1), + np.real(Z1)-np.real(Z2), 0, 0, -1]) + Zmatrix.append([-w**2*np.imag(Z1), 0, 0, w*np.real(Z1), 0, 0, np.imag(Z1), + np.imag(Z1) - np.imag(Z2), 0, 0, 0]) + + if force_all: + Zmatrix.append([0, -w**2*np.real(Z2), 0, 0, -w*np.imag(Z2), 0, 0, + np.real(Z2)-np.real(Z1), np.real(Z2) - np.real(Z3), 0, -1]) + else: + Zmatrix.append([0, -w**2*np.real(Z2), 0, 0, -w*np.imag(Z2), 0, 0, + np.real(Z2)-np.real(Z1), np.real(Z2) - np.real(Z3), 0, 0]) + + Zmatrix.append([0, -w**2*np.imag(Z2), 0, 0, w*np.real(Z2), 0, 0, + np.imag(Z2)-np.imag(Z1), np.imag(Z2) - np.imag(Z3), 0, 0]) + + if force_all: + Zmatrix.append([0, 0, -w**2*np.real(Z3), 0, 0, -w*np.imag(Z3), 0, 0, + np.real(Z3)-np.real(Z2), np.real(Z3), -1]) + 
else: + Zmatrix.append([0, 0, -w**2*np.real(Z3), 0, 0, -w*np.imag(Z3), 0, 0, + np.real(Z3)-np.real(Z2), np.real(Z3), 0]) + + Zmatrix.append([0, 0, -w**2*np.imag(Z3), 0, 0, w*np.real(Z3), 0, 0, + np.imag(Z3)-np.imag(Z2), np.imag(Z3), 0]) + + return np.array(Zmatrix) + +def unnormalizedparameters(Zmatrix): + U, S, Vh = np.linalg.svd(Zmatrix) + V = Vh.conj().T + return V[:,-1] #Will it always be the last column of V?? + +def normalize_parameters_1d_by_force(unnormalizedparameters, F_set): + # parameters vector: 'm1', 'm2', 'm3', 'b1', 'b2', 'b3', 'k1', 'k2', 'k3', 'k4', 'Driving Force' + c = F_set / unnormalizedparameters[-1] + parameters = [c*unnormalizedparameters[k] for k in range(len(unnormalizedparameters)) ] + return parameters + +def complex_noise(n, noiselevel): + global complexamplitudenoisefactor + complexamplitudenoisefactor = 0.0005 + return noiselevel* complexamplitudenoisefactor * np.random.randn(n,) + +''' Example work begins here. ''' + +#This is the data for NetMAP to work with. Using the same data as Sam in thesis +f1 = 1.7 +f2 = 2.3 +m1 = 3 +m2 = 3 +m3 = 3 +b1 = 0.1 +b2 = 0.1 +b3 = 0.1 +k1 = 5 +k2 = 5 +k3 = 5 +k4 = 1 #no fourth spring connecting mass 4 to wall in this +F = 1 + +#create some noise +e = complex_noise(2, 2) #number of frequencies, noise level +frequencies = [f1, f2] + +# getting the complex amplitudes with a function from Trimer_simulator +comamps1, comamps2, comamps3 = calculate_spectra(frequencies, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3, e, False) + +#Now that we have the data, create the Zmatrix: +trizmatrix = Zmatrix(frequencies, comamps1, comamps2, comamps3, False) + +#Get the unnormalized parameters: +notnormparam_tri = unnormalizedparameters(trizmatrix) + +#Normalize the parameters +final_tri = normalize_parameters_1d_by_force(notnormparam_tri, 1) + +# print(final_tri) +# it works! finally! 
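+
+# Illustrative check (added sketch, not part of the original workflow): compare
+# the recovered parameters against the values used to simulate the spectra.
+# The recovered vector is ordered [m1, m2, m3, b1, b2, b3, k1, k2, k3, k4, F],
+# as noted in normalize_parameters_1d_by_force above.
+true_values = [m1, m2, m3, b1, b2, b3, k1, k2, k3, k4, F]
+names = ['m1', 'm2', 'm3', 'b1', 'b2', 'b3', 'k1', 'k2', 'k3', 'k4', 'F']
+for name, recovered, true in zip(names, final_tri, true_values):
+    print(f'{name}: recovered {recovered:.4f}, true {true}, '
+          f'% error {100 * abs(recovered - true) / true:.2f}')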
diff --git a/trimer/Trimer_curvefit.py b/trimer/Trimer_curvefit.py new file mode 100644 index 0000000..c86d7bf --- /dev/null +++ b/trimer/Trimer_curvefit.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Fri Mar 22 15:53:26 2024 + +@author: samfeldman +""" + +import numpy as np +import matplotlib.pyplot as plt +from scipy.optimize import curve_fit +#from sklearn.metrics import r2_score #Lydia - I don't have this package on my computer +from Trimer_simulator import c1 + +freq = np.linspace(.01, 5, 500) +A = c1(freq, 5, 5, 3, 5, .1, .1, .1, 1, 5, 2, 1) + +def curve_func(freq, k_1, k_2, k_3, k_4, b1, b2, b3, F, m1, m2, m3): + return c1(freq, k_1, k_2, k_3, k_4, b1, b2, b3, F, m1, m2, m3) + +initial_guess = [5, 5, 3, 5, .1, .1, .1, 1, 5, 2, 1] +# Perform curve fitting +popt, pcov = curve_fit(curve_func, freq, A, p0=initial_guess) + +# Extract fitting constants +k_1_fit, k_2_fit, k_3_fit, k_4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit = popt + +# Print the fitting parameters +print("Fitting Parameters:") +print("k1:", k_1_fit) +print("k2:", k_2_fit) +print("k3:", k_3_fit) +print("k4:", k_4_fit) +print("b1:", b1_fit) +print("b2:", b2_fit) +print("b3:", b3_fit) +print("F:", F_fit) +print("m1:", m1_fit) +print("m2:", m2_fit) +print("m3:", m3_fit) + +# Plotting +plt.figure(figsize=(8, 6)) +plt.scatter(freq, A, label='Original Data') +plt.xlabel('Frequency (f)') +plt.ylabel('A') +plt.title('Curve Fitting with Three Peaks') +plt.grid(True) + +# Generate points for the fitted curve +freq_fit = np.linspace(min(freq), max(freq), 500) +A_fit = curve_func(freq_fit, *popt) + +# Plot the fitted curve +plt.plot(freq_fit, A_fit, color='red', label='Fitted Curve') + +# Add legend +plt.legend() + +# Show plot +plt.show() + +# Calculate R-squared +#r_squared = r2_score(A, curve_func(freq, *popt)) + +# Print R-squared value +#print("R-squared:", r_squared) + \ No newline at end of file diff --git a/trimer/Trimer_curvefit_lmfit.py b/trimer/Trimer_curvefit_lmfit.py new file mode 100644 index 0000000..1d3d54b --- /dev/null +++ b/trimer/Trimer_curvefit_lmfit.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Mon Jul 8 15:13:48 2024 + +@author: lydiabullock +""" + +import numpy as np +import matplotlib.pyplot as plt +from lmfit import Model +from Trimer_simulator import c1, c2, c3 + +#type of function to fit for all three amplitude curves +def c1_function(w, k_1, k_2, k_3, k_4, b1, b2, b3, F, m1, m2, m3): + return c1(w, k_1, k_2, k_3, k_4, b1, b2, b3, F, m1, m2, m3) +def c2_function(w, k_1, k_2, k_3, k_4, b1, b2, b3, F, m1, m2, m3): + return c2(w, k_1, k_2, k_3, k_4, b1, b2, b3, F, m1, m2, m3) +def c3_function(w, k_1, k_2, k_3, k_4, b1, b2, b3, F, m1, m2, m3): + return c3(w, k_1, k_2, k_3, k_4, b1, b2, b3, F, m1, m2, m3) + +#create data for all three amplitudes +freq = np.linspace(0, 5, 300) +A_c1 = c1_function(freq, 3, 3, 3, 0, 2, 2, 2, 1, 5, 5, 5) +# A_c2 = c2_function(freq, 3, 3, 3, 0, 2, 2, 2, 1, 5, 5, 5) +# A_c3 = c3_function(freq, 3, 3, 3, 0, 2, 2, 2, 1, 5, 5, 5) + +model1 = Model(c1_function) +# model2 = Model(c2_function) +# model3 = Model(c3_function) + +#make parameters/initial guesses +#true parameters = [3, 3, 3, 0, 2, 2, 2, 1, 5, 5, 5] +initial_guesses = { + 'k_1': 3, + 'k_2': 3, + 'k_3': 3.53, + 'k_4': 0, + 'b1': 1, + 'b2': 0.9, + 'b3': 0.1, + 'F': 1, + 'm1': 5, + 'm2': 5, + 'm3': 4.5 +} + +params1 = model1.make_params(**initial_guesses) +# params2 = model2.make_params(**initial_guesses) +# params3 = 
model3.make_params(**initial_guesses) + +graph1 = model1.fit(A_c1, params1, w=freq) +# graph2 = model2.fit(A_c2, params2, w=freq) +# graph3 = model3.fit(A_c3, params3, w=freq) + +#print(graph1.fit_report()) +#print(graph2.fit_report()) +#print(graph3.fit_report()) + +##Graph it! + +#original data +plt.plot(freq, A_c1, 'bo', label='Data C1') +# plt.plot(freq, A_c2, 'go', label='Data C2') +# plt.plot(freq, A_c3, 'ro', label='Data C3') + +#generate points for fitted curve +freq_fit = np.linspace(min(freq),max(freq), 500) #more w-values than before +A_c1_fit = graph1.model.func(freq_fit, **graph1.best_values) +#A_c2_fit = graph2.model.func(freq_fit, **graph2.best_values) +#A_c3_fit = graph3.model.func(freq_fit, **graph3.best_values) + +#fitted curve +plt.plot(freq_fit, A_c1_fit, '-', label='Fitted Curve 1') +# plt.plot(freq_fit, A_c2_fit, '-', label='Fitted Curve 2') +# plt.plot(freq_fit, A_c3_fit, '-', label='Fitted Curve 3') + +#generate points for guessed parameters curve +freq_guess = np.linspace(min(freq),max(freq), 500) +A_c1_guess = c1_function(freq_guess, **initial_guesses) +# A_c2_guess = c2_function(freq_guess, initial_guesses) +# A_c3_guess = c3_function(freq_guess, initial_guesses) + +#guessed parameters curve +plt.plot(freq_guess, A_c1_guess, linestyle='dashed', label='Guessed Parameters 1') +# plt.plot(freq_guess, A_c2_guess, linestyle='dashed', label='Guessed Parameters 2') +# plt.plot(freq_guess, A_c3_guess, linestyle='dashed', label='Guessed Parameters 3') + +#graph parts +plt.legend(loc='best') +plt.xlabel('Frequency (Hz)') +plt.ylabel('Amplitude (m)') +plt.title('Curve Fitting Trimer') +plt.show() + + diff --git a/trimer/Trimer_simulator.py b/trimer/Trimer_simulator.py new file mode 100644 index 0000000..25a2bcb --- /dev/null +++ b/trimer/Trimer_simulator.py @@ -0,0 +1,317 @@ +# -*- coding: utf-8 -*- +""" +Spyder Editor + +This is a temporary script file. +""" + +''' Create code that simulates spectrum response for trimer + See if we can recover the parameters + Does NOT include noise ''' + +import numpy as np +import sympy as sp +import matplotlib.pyplot as plt + +#Define all variables for sympy + +#individual springs that correspond to individual masses +k1 = sp.symbols('k_1', real = True) + +#springs that connect two masses +k2 = sp.symbols('k_2', real = True) +k3 = sp.symbols('k_3', real = True) +k4 = sp.symbols('k_4', real = True) + +#damping coefficients +b1 = sp.symbols('b1', real = True) +b2 = sp.symbols('b2', real = True) +b3 = sp.symbols('b3', real = True) + +#masses +m1 = sp.symbols('m1', real = True) +m2 = sp.symbols('m2', real = True) +m3 = sp.symbols('m3', real = True) + +#Driving force amplitude +F = sp.symbols('F', real = True) + +#driving frequency (leave as variable) +wd = sp.symbols(r'\omega_d', real = True) + +#Symbolically solve for driving amplitudes and phase using sympy + +### Trimer +#Matrix for complex equations of motion, Matrix . Zvec = Fvec +unknownsmatrix = sp.Matrix([[-wd**2*m1 + 1j*wd*b1 + k1 + k2, -k2, 0], + [-k2, -wd**2*m2 + 1j*wd*b2 + k2 + k3, -k3], + [0, -k3, -wd**2*m3 + 1j*wd*b3 + k3 + k4]]) + +''' Lydia - I'm pretty sure he had a mistake in the unknowns matrix. 
There were some k4's +showing up where they weren't supposed to be (-k4 where the zeros are now and one +k4 +in the first entry) ''' + +#Matrices for Cramer's Rule: substitute force vector Fvec=[F,0] for each column in turn (m1 is driven, m2 and m3 are not) +unknownsmatrix1 = sp.Matrix([[F, -k2, 0], + [0, -wd**2*m2 + 1j*wd*b2 + k2 + k3, -k3], + [0, -k3, -wd**2*m3 + 1j*wd*b3 + k3 + k4]]) +unknownsmatrix2 = sp.Matrix([[-wd**2*m1 + 1j*wd*b1 + k1 + k2, F, 0], + [-k2, 0, -k3], + [0, 0, -wd**2*m3 + 1j*wd*b3 + k3 + k4]]) +unknownsmatrix3 = sp.Matrix([[-wd**2*m1 + 1j*wd*b1 + k1 + k2, -k3, F], + [-k2, -wd**2*m2 + 1j*wd*b2 + k2 + k3, 0], + [0, -k3, 0]]) + +#Apply Cramer's Rule to solve for Zvec +complexamp1, complexamp2, complexamp3 = (unknownsmatrix1.det()/unknownsmatrix.det(), + unknownsmatrix2.det()/unknownsmatrix.det(), + unknownsmatrix3.det()/unknownsmatrix.det()) + +#Solve for phases for each mass +delta1 = sp.arg(complexamp1) # Returns the argument (phase angle in radians) of a complex number. +delta2 = sp.arg(complexamp2) # sp.re(complexamp2)/sp.cos(delta2) (this is the same thing) +delta3 = sp.arg(complexamp3) + + +### What if we apply the same force to all three masses of dimer? +#Matrices for Cramer's Rule: substitute force vector Fvec=[F,0] for each column in turn (m1 is driven, m2 is not) +unknownsmatrix1FFF = sp.Matrix([[F, -k2, 0], + [F, -wd**2*m2 + 1j*wd*b2 + k2 + k3, -k3], + [F, -k3, -wd**2*m3 + 1j*wd*b3 + k3 + k4]]) +unknownsmatrix2FFF = sp.Matrix([[-wd**2*m1 + 1j*wd*b1 + k1 + k2, F, 0], + [-k2, F, -k3], + [0, F, -wd**2*m3 + 1j*wd*b3 + k3 + k4]]) +unknownsmatrix3FFF = sp.Matrix([[-wd**2*m1 + 1j*wd*b1 + k1 + k2, -k2, F], + [-k2, -wd**2*m2 + 1j*wd*b2 + k2 + k3,F], + [0, -k3, F]]) +#Apply Cramer's Rule to solve for Zvec +complexamp1FFF, complexamp2FFF, complexamp3FFF = (unknownsmatrix1FFF.det()/unknownsmatrix.det(), + unknownsmatrix2FFF.det()/unknownsmatrix.det(), + unknownsmatrix3FFF.det()/unknownsmatrix.det()) +#Solve for phases for each mass +delta1FFF = sp.arg(complexamp1FFF) # Returns the argument (phase angle in radians) of a complex number. 
+delta2FFF = sp.arg(complexamp2FFF) # sp.re(complexamp2)/sp.cos(delta2) (this is the same thing) +delta3FFF = sp.arg(complexamp3FFF) + +### Ampolitude and phase +#Wrap phases for plots + +wrap1 = (delta1)%(2*sp.pi) +wrap2 = (delta2)%(2*sp.pi) +wrap3 = (delta3)%(2*sp.pi) +wrap1FFF = (delta1FFF)%(2*sp.pi) +wrap2FFF = (delta2FFF)%(2*sp.pi) +wrap3FFF = (delta3FFF)%(2*sp.pi) + +#Solve for amplitude coefficients (real amplitude A - not complex) +amp1 = sp.Abs(complexamp1) +amp2 = sp.Abs(complexamp2) +amp3 = sp.Abs(complexamp3) +amp1FFF = sp.Abs(complexamp1FFF) +amp2FFF = sp.Abs(complexamp2FFF) +amp3FFF = sp.Abs(complexamp3FFF) + +#lambdify curves using sympy +#c = amplitude (not complex), t = phase +#re and im are the real and imaginary parts of complex number + +c1 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp1) +t1 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap1) + +c2 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp2) +t2 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap2) + +c3 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp3) +t3 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap3) + +re1 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp1)) +im1 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp1)) +re2 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp2)) +im2 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp2)) +re3 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp3)) +im3 = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp3)) + + +c1FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp1FFF) +t1FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap1FFF) + +c2FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp2FFF) +t2FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap2FFF) + +c3FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), amp3FFF) +t3FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), wrap3FFF) + +re1FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp1FFF)) +im1FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp1FFF)) +re2FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp2FFF)) +im2FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp2FFF)) +re3FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.re(complexamp3FFF)) +im3FFF = sp.lambdify((wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3), sp.im(complexamp3FFF)) + +#define functions + +#curve = (real) amplitude, theta = phase, e = error (i.e. 
noise) +#realamp, imamp = real and imaginary parts of complex number + +def curve1(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return c1FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return c1(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def theta1(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return t1FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + else: #force just m1 + return t1(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + +def curve2(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return c2FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return c2(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def theta2(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return t2FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + else: #force just m1 + return t2(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + +def curve3(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return c3FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return c3(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def theta3(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return t3FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + else: #force just m1 + return t3(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) - 2*np.pi + e + +def realamp1(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return re1FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return re1(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def imamp1(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return im1FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return im1(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def realamp2(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return re2FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return re2(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def imamp2(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return im2FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return im2(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def realamp3(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with 
np.errstate(divide='ignore'): + if force_all: + return re3FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return re3(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + +def imamp3(w, k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3, e, force_all): + with np.errstate(divide='ignore'): + if force_all: + return im3FFF(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + else: #force just m1 + return im3(np.array(w), k_1, k_2, k_3, k_4, b1_, b2_, b_3, F_, m_1, m_2, m_3) + e + + +''' Let's create some graphs ''' + +#Amplitude and phase vs frequency +# freq = np.linspace(.01,5,500) +# amps1 = curve1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False) +# phase1 = theta1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False) +# fig, ax1 = plt.subplots() +# ax1.plot(freq, amps1,'r-', label='Amplitude') +# ax1.set_xlabel('Frequency') +# ax1.set_ylabel('Amplitude') +# ax2 = ax1.twinx() +# ax2.plot(freq, phase1,'b-', label='Phase') +# ax2.set_ylabel('Phase') +# ax1.legend(loc='upper right') +# ax2.legend(loc='center right') + +# #Z_1 - complex plane +# realpart1 = realamp1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False) +# impart1 = imamp1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False) +# plt.plot(realpart1, impart1, 'go', linestyle='dashed') +# plt.xlabel('Re(Z)') +# plt.ylabel('Im(Z)') +# plt.title('$Z_1(w)$') + +''' Below is more efficient I think. + But the runtime for the code is still a bit long. ''' + +##Another way to graph the complex plane! Probably faster as we get more complex amps + +def complexamp(A,phi): #takes a real amplitude and phase and returns a complex number + return A * np.exp(1j*phi) + +# freq = np.linspace(.01,5,500) +# Z1 = (complexamp(curve1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False))) +# Z2 = (complexamp(curve2(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta2(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False))) +# Z3 = (complexamp(curve3(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta3(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False))) + +# Just the first complex amplitude +# plt.plot(Z1.real, Z1.imag, 'go', linestyle = 'dashed') +# plt.xlabel('Re($Z_1$)') +# plt.ylabel('Im($Z_1$)') +# plt.title('$Z_1(w)$') + + +##Another way to graph frequency vs amplitude! 
+# goes the other way around
+
+def amp(a,b):
+    return np.sqrt(a**2 + b**2)
+
+def A_from_Z(Z): # calculate amplitude of complex number
+    return amp(Z.real, Z.imag)
+
+# freq = np.linspace(.01,5,500)
+# Z1 = (complexamp(curve1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta1(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False)))
+# Z2 = (complexamp(curve2(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta2(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False)))
+# Z3 = (complexamp(curve3(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False), theta3(freq, 1,2,3,4,.5,.5,.5, 1, 2, 3, 4, 0 , False)))
+
+# amps1 = A_from_Z(Z1)
+# plt.plot(freq, amps1, 'r-')
+# plt.xlabel('Frequency')
+# plt.ylabel('Amplitude')
+# plt.title('$Z_1(w)$')
+
+
+''' Create data for Trimer NetMAP '''
+
+#Complex amps at a frequency
+#Can call this function in other code :)
+def calculate_spectra(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all):
+    Z1 = list(complexamp(curve1(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all), theta1(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all)))
+    Z2 = list(complexamp(curve2(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all), theta2(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all)))
+    Z3 = list(complexamp(curve3(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all), theta3(drive, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, e, force_all)))
+
+    return Z1, Z2, Z3
+
+
diff --git a/trimer/comparing_curvefit_types.py b/trimer/comparing_curvefit_types.py
new file mode 100644
index 0000000..ac00949
--- /dev/null
+++ b/trimer/comparing_curvefit_types.py
@@ -0,0 +1,962 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Jul 18 14:42:41 2024
+
+@author: lydiabullock
+"""
+
+''' Which has more accurate recovered parameters: Amp & Phase or X & Y?
+    Using method of fixing F. '''
+
+import os
+import pandas as pd
+import math
+import random
+import numpy as np
+import matplotlib.pyplot as plt
+from curve_fitting_amp_phase_all import multiple_fit_amp_phase
+from curve_fitting_X_Y_all import multiple_fit_X_Y
+from Trimer_simulator import calculate_spectra, curve1, theta1, curve2, theta2, curve3, theta3, c1, t1, c2, t2, c3, t3, realamp1, realamp2, realamp3, imamp1, imamp2, imamp3, re1, re2, re3, im1, im2, im3
+from Trimer_NetMAP import Zmatrix, unnormalizedparameters, normalize_parameters_1d_by_force
+import warnings
+import time
+import timeit
+import statistics
+
+''' Functions contained:
+    complex_noise - creates noise, e
+    syserr - Calculates systematic error
+    generate_random_system - Randomly generates parameters for a system. Parameter values are between 1 and 10 for all but the coefficients of friction, which are between 0.01 and 1; the force is fixed at 1.
+    plot_guess - Used for the Case Study. Plots just the data and the guessed parameters curve. No curve fitting.
+    automate_guess - Randomly generates guess parameters within a certain percent of the true parameters
+    save_figure - Saves figures to a folder of your naming choice. Also allows you to choose the figure name.
+    timeit_function - Uses the timeit package to time how long a function takes to run.
+                       - Runs it multiple times (number of your choosing) and returns the average time and std dev for more accurate results.
+    get_parameters_NetMAP - Recovers parameters for a system given the guessed parameters
+    run_trials - Runs a set number of trials for one system, graphs curvefit result,
+                 puts data and averages into spreadsheet, returns _bar for both types of curves
+               - Must include number of trials to run and name of excel sheet
+
+    This file also imports multiple_fit_amp_phase, which performs curve fitting on Amp vs Freq and Phase vs Freq curves for all 3 masses simultaneously,
+    and multiple_fit_X_Y, which performs curve fitting on X vs Freq and Y vs Freq curves for all 3 masses simultaneously.
+'''
+
+def complex_noise(n, noiselevel):
+    global complexamplitudenoisefactor
+    complexamplitudenoisefactor = 0.0005
+    return noiselevel* complexamplitudenoisefactor * np.random.randn(n,)
+# np.random.randn returns samples from a gaussian distribution with variance 1 and mean 0
+# noiselevel* complexamplitudenoisefactor is the standard deviation
+
+
+def syserr(x_found,x_set, absval = True):
+    with warnings.catch_warnings():
+        warnings.simplefilter('ignore')
+        se = 100*(x_found-x_set)/x_set
+    if absval:
+        return abs(se)
+    else:
+        return se
+
+#Randomly generates parameters of a system. k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3
+def generate_random_system():
+    system_params = []
+    for i in range(11):
+        if i==7: #Doing this because we must keep force the same throughout
+            system_params.append(1)
+        elif i==4 or i==5 or i==6:
+            param = random.uniform(0.01,1)
+            round_param = round(param, 3)
+            system_params.append(round_param)
+        else:
+            param = random.uniform(1,10)
+            round_param = round(param, 3)
+            system_params.append(round_param)
+    return system_params
+
+#Plots data and guessed parameters curve
+def plot_guess(params_guess, params_correct):
+    ##Create data - this is the same as what I use in the curve fit functions
+    freq = np.linspace(0.001, 4, 300)
+
+    #Create noise
+    e = complex_noise(300, 2)
+    force_all = False
+
+    #Original Data
+    X1 = realamp1(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all)
+    Y1 = imamp1(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all)
+
+    X2 = realamp2(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all)
+    Y2 = imamp2(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all)
+
+    X3 = realamp3(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all)
+    Y3 = imamp3(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all)
+
+    Amp1 =
curve1(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + Phase1 = theta1(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) \ + + 2 * np.pi + Amp2 = curve2(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + Phase2 = theta2(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) \ + + 2 * np.pi + Amp3 = curve3(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + Phase3 = theta3(freq, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) \ + + 2 * np.pi + + #Guessed Curve + re1_guess = re1(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + re2_guess = re2(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + re3_guess = re3(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + im1_guess = im1(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + im2_guess = im2(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + im3_guess = im3(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + c1_guess = c1(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + c2_guess = c2(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + c3_guess = c3(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + t1_guess = t1(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], 
params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + t2_guess = t2(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + t3_guess = t3(freq, params_guess[0], params_guess[1], params_guess[2], params_guess[3], params_guess[4], params_guess[5], params_guess[6], params_guess[7], params_guess[8], params_guess[9], params_guess[10]) + + ## Begin graphing + fig = plt.figure(figsize=(16,11)) + gs = fig.add_gridspec(3, 3, hspace=0.25, wspace=0.05) + + ax1 = fig.add_subplot(gs[0, 0]) + ax2 = fig.add_subplot(gs[0, 1], sharex=ax1, sharey=ax1) + ax3 = fig.add_subplot(gs[0, 2], sharex=ax1, sharey=ax1) + ax4 = fig.add_subplot(gs[1, 0], sharex=ax1) + ax5 = fig.add_subplot(gs[1, 1], sharex=ax1, sharey=ax4) + ax6 = fig.add_subplot(gs[1, 2], sharex=ax1, sharey=ax4) + ax7 = fig.add_subplot(gs[2, 0], aspect='equal') + ax8 = fig.add_subplot(gs[2, 1], sharex=ax7, sharey=ax7, aspect='equal') + ax9 = fig.add_subplot(gs[2, 2], sharex=ax7, sharey=ax7, aspect='equal') + + #original data + ax1.plot(freq, X1,'ro', alpha=0.5, markersize=5.5, label = 'Data') + ax2.plot(freq, X2,'bo', alpha=0.5, markersize=5.5, label = 'Data') + ax3.plot(freq, X3,'go', alpha=0.5, markersize=5.5, label = 'Data') + ax4.plot(freq, Y1,'ro', alpha=0.5, markersize=5.5, label = 'Data') + ax5.plot(freq, Y2,'bo', alpha=0.5, markersize=5.5, label = 'Data') + ax6.plot(freq, Y3,'go', alpha=0.5, markersize=5.5, label = 'Data') + ax7.plot(X1,Y1,'ro', alpha=0.5, markersize=5.5, label = 'Data') + ax8.plot(X2,Y2,'bo', alpha=0.5, markersize=5.5, label = 'Data') + ax9.plot(X3,Y3,'go', alpha=0.5, markersize=5.5, label = 'Data') + + #inital guess curves + ax1.plot(freq, re1_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax2.plot(freq, re2_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax3.plot(freq, re3_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax4.plot(freq, im1_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax5.plot(freq, im2_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax6.plot(freq, im3_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax7.plot(re1_guess, im1_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax8.plot(re2_guess, im2_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax9.plot(re3_guess, im3_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + + #Graph parts + fig.suptitle('Trimer Resonator: Real and Imaginary', fontsize=16) + ax1.set_title('Mass 1', fontsize=14) + ax2.set_title('Mass 2', fontsize=14) + ax3.set_title('Mass 3', fontsize=14) + ax1.set_ylabel('Real') + ax4.set_ylabel('Imaginary') + ax7.set_ylabel('Imaginary') + + ax1.label_outer() + ax2.label_outer() + ax3.label_outer() + ax5.tick_params(labelleft=False) + ax6.tick_params(labelleft=False) + ax7.label_outer() + ax8.label_outer() + ax9.label_outer() + + ax4.set_xlabel('Frequency') + ax5.set_xlabel('Frequency') + ax6.set_xlabel('Frequency') + ax7.set_xlabel('Real') + ax8.set_xlabel('Real') + ax9.set_xlabel('Real') + + ax1.legend() + ax2.legend() + ax3.legend() + ax4.legend() + ax5.legend() + ax6.legend() + ax7.legend(fontsize='10') + ax8.legend(fontsize='10') + ax9.legend(fontsize='10') + + plt.show() + + ## Begin graphing + fig = plt.figure(figsize=(16,8)) + gs = fig.add_gridspec(2, 3, hspace=0.1, 
wspace=0.1) + ((ax1, ax2, ax3), (ax4, ax5, ax6)) = gs.subplots(sharex=True, sharey='row') + + #original data + ax1.plot(freq, Amp1,'ro', alpha=0.5, markersize=5.5, label = 'Data') + ax2.plot(freq, Amp2,'bo', alpha=0.5, markersize=5.5, label = 'Data') + ax3.plot(freq, Amp3,'go', alpha=0.5, markersize=5.5, label = 'Data') + ax4.plot(freq, Phase1,'ro', alpha=0.5, markersize=5.5, label = 'Data') + ax5.plot(freq, Phase2,'bo', alpha=0.5, markersize=5.5, label = 'Data') + ax6.plot(freq, Phase3,'go', alpha=0.5, markersize=5.5, label = 'Data') + + #inital guess curves + ax1.plot(freq, c1_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax2.plot(freq, c2_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax3.plot(freq, c3_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax4.plot(freq, t1_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax5.plot(freq, t2_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax6.plot(freq, t3_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + + + #Graph parts + fig.suptitle('Trimer Resonator: Amplitude and Phase', fontsize=16) + ax1.set_title('Mass 1', fontsize=14) + ax2.set_title('Mass 2', fontsize=14) + ax3.set_title('Mass 3', fontsize=14) + ax1.set_ylabel('Amplitude') + ax4.set_ylabel('Phase') + + for ax in fig.get_axes(): + ax.set(xlabel='Frequency') + ax.label_outer() + ax.legend() + + print(f"Graphing guessed curve with guessed parameters: {params_guess}") + + plt.show() + +#Generates random guess parameters that are within a certain percent of the true parameters +def automate_guess(true_params, percent_threshold): + params_guess = [] + for index, value in enumerate(true_params): + if index == 7: #Doing this because we must know what Force is going in + params_guess.append(value) + else: + threshold = value * (percent_threshold / 100) + num = random.uniform(value-threshold, value+threshold) + rounded_num = round(num, 4) # Round to 4 decimal places + params_guess.append(rounded_num) + return params_guess + +#Saves graphs +def save_figure(figure, folder_name, file_name): + # Create the folder if it does not exist + if not os.path.exists(folder_name): + os.makedirs(folder_name) + + # Save the figure to the folder + file_path = os.path.join(folder_name, file_name) + figure.savefig(file_path, bbox_inches = 'tight') + plt.close(figure) + +# runs > 1 if you want to run one function several times to get the average time +def timeit_function(func, args=None, kwargs=None, runs=7): + args = args or () + kwargs = kwargs or {} + + times = [] + for _ in range(runs): + t = timeit.timeit(lambda: func(*args, **kwargs), number=1) + times.append(t) + + mean_time = statistics.mean(times) + std_dev = statistics.stdev(times) if runs > 1 else 0.0 + return mean_time, std_dev, times + +def get_parameters_NetMAP(frequencies, params_guess, params_correct, e, force_all): + + #Getting the complex amplitudes (data) with a function from Trimer_simulator + #Still part of the simulation + Z1, Z2, Z3 = calculate_spectra(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + + #Create the Zmatrix: + #This is where we begin NetMAP + trizmatrix = Zmatrix(frequencies, Z1, Z2, Z3, False) + + #Get the unnormalized parameters: + notnormparam_tri = unnormalizedparameters(trizmatrix) + + #Normalize the parameters 
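+    # The unnormalized NetMAP solution is only defined up to an overall scale factor, so
+    # normalize_parameters_1d_by_force pins it down using the known driving force (1 here).
+    # A minimal sketch of the idea, using illustrative names only (the actual implementation
+    # lives in Trimer_NetMAP and may differ), assuming the force is the last entry of the vector:
+    #   scale = known_force / unnormalized_vector[-1]
+    #   normalized_vector = unnormalized_vector * scale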
+ final_tri = normalize_parameters_1d_by_force(notnormparam_tri, 1) + # parameters vector: 'm1', 'm2', 'm3', 'b1', 'b2', 'b3', 'k1', 'k2', 'k3', 'k4', 'Driving Force' + + #Put everything into a np array + #Order added: k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3 + data_array = np.zeros(46) #44 elements are generated in this code, but I leave the last entry empty because I want to time how long it takes the function to run in other code, so I'm giving the array space to add the time if necessary + data_array[:11] += np.array(params_correct) + data_array[11:22] += np.array(params_guess) + #Adding the recovered parameters and fixing the order + data_array[22:26] += np.array(final_tri[6:10]) + data_array[26:29] += np.array(final_tri[3:6]) + data_array[29] += np.array(final_tri[-1]) + data_array[30:33] += np.array(final_tri[:3]) + #adding systematic error calculations + syserr_result = syserr(data_array[22:33], data_array[:11]) + data_array[33:44] += np.array(syserr_result) #individual errors for each parameter + data_array[-2] += np.sum(data_array[33:44]/10) #this is average error ... dividing by 10 (not 11) because we aren't counting the error in Force because the error is 0 + + return data_array + +#Runs a set number of trials for one system, graphs curvefit result, +# puts data and averages into spreadsheet, returns avg_e arrays and _bar for all types of curves +def run_trials(true_params, guessed_params, freqs_NetMAP, freqs_curvefit, length_noise_NetMAP, length_noise_curvefit, num_trials, excel_file_name, graph_folder_name): + + #Needed for calculating e_bar and for graphing - also these are things that will be returned + avg_e1_array = np.zeros(num_trials) #Polar + avg_e2_array = np.zeros(num_trials) #Cartesian + avg_e3_array = np.zeros(num_trials) #NetMAP + + #Needed to add all the data to a spreadsheet at the end + all_data1 = np.empty((0, 52)) #Polar + all_data2 = np.empty((0, 52)) #Cartesian + all_data3 = np.empty((0, 46)) #NetMAP + + #FOR ONLY when I'm running 1 trial per system: + # with pd.ExcelWriter(excel_file_name, engine='xlsxwriter') as writer: + + #Creating arrays to store the time it takes each curvefit/NetMAP function to run - will average them at the end + times_polar = np.empty(num_trials) + times_cartesian = np.empty(num_trials) + times_NetMAP = np.empty(num_trials) + + #For more than 1 trial per system: + for i in range(num_trials): + + #Create noise - noise level 2 + e = complex_noise(length_noise_curvefit, 2) + + ##For NetMAP + #create noise - noise level 2 + e_NetMAP = complex_noise(length_noise_NetMAP,2) + + #Get the data! 
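+        # Each call below returns one row of results for this trial (see param_names further down
+        # for the column layout): true parameters, guessed parameters, recovered parameters,
+        # per-parameter systematic errors, R^2 values (curve fits only), the average error across
+        # parameters, and a slot for the trial time.
+        # array1/array2 (curve fits) have 52 entries; array3 (NetMAP) has 46 since it has no R^2 columns.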
+ array1 = multiple_fit_amp_phase(guessed_params, true_params, e, freqs_curvefit, False, True, False, graph_folder_name, f'Polar_fig_{i}') #Polar, Fixed force + array2 = multiple_fit_X_Y(guessed_params, true_params, e, freqs_curvefit, False, True, graph_folder_name, f'Cartesian_fig_{i}') #Cartesian, Fixed force + array3 = get_parameters_NetMAP(freqs_NetMAP, guessed_params, true_params, e_NetMAP, False) #NetMAP + + #Time how long it takes to get the data and add the time to the larger array: + #NOTE THAT - if you are outputting graphs within the curve fitting functions, the run time will be longer than it takes to get the actual data + #that is, only use the timeit functions below when show_curvefit_graphs = False + t_polar = timeit.timeit(lambda: multiple_fit_amp_phase(guessed_params, true_params, e, freqs_curvefit, False, True, False, graph_folder_name, f'Polar_fig_{i}'), number=1) + times_polar[i] = t_polar + t_cartesian = timeit.timeit(lambda: multiple_fit_X_Y(guessed_params, true_params, e, freqs_curvefit, False, True, graph_folder_name, f'Cartesian_fig_{i}'), number=1) + times_cartesian[i] = t_cartesian + t_NetMAP = timeit.timeit(lambda: get_parameters_NetMAP(freqs_NetMAP, guessed_params, true_params, e_NetMAP, False), number=1) + times_NetMAP[i] = t_NetMAP + + #add each individual time to the array for each method so it can be stored with the data for each trial + #array1, array2, array3 to be stacked into the larger all_data arrays + array1[-1] = t_polar + array2[-1] = t_cartesian + array3[-1] = t_NetMAP + + #Pull out (average across parameters) for each trial and add to arrays for e_bar calculation later + #it is the second the last entry in the array (times is the last) + avg_e1_array[i] += array1[-2] + avg_e2_array[i] += array2[-2] + avg_e3_array[i] += array3[-2] + + #Stack each trial's data to the larger array + all_data1 = np.vstack((all_data1, array1)) + all_data2 = np.vstack((all_data2, array2)) + all_data3 = np.vstack((all_data3, array3)) + + #Calculate average time it took for each method to recover parameters, along with standard deviation + mean_time_polar = statistics.mean(times_polar) + std_dev_polar = statistics.stdev(times_polar) + mean_time_cartesian = statistics.mean(times_cartesian) + std_dev_cartesian = statistics.stdev(times_cartesian) + mean_time_NetMAP = statistics.mean(times_NetMAP) + std_dev_NetMAP = statistics.stdev(times_NetMAP) + + #Calculate average error across parameters + avg_e1_bar = math.exp(sum(np.log(avg_e1_array))/num_trials) + avg_e2_bar = math.exp(sum(np.log(avg_e2_array))/num_trials) + avg_e3_bar = math.exp(sum(np.log(avg_e3_array))/num_trials) + + + #For labeling the excel sheet + param_names = ['k1_true', 'k2_true', 'k3_true', 'k4_true', + 'b1_true', 'b2_true', 'b3_true', + 'F_true', 'm1_true', 'm2_true', 'm3_true', + 'k1_guess', 'k2_guess', 'k3_guess', 'k4_guess', + 'b1_guess', 'b2_guess', 'b3_guess', + 'F_guess', 'm1_guess', 'm2_guess', 'm3_guess', + 'k1_recovered', 'k2_recovered', 'k3_recovered', 'k4_recovered', + 'b1_recovered', 'b2_recovered', 'b3_recovered', + 'F_recovered', 'm1_recovered', 'm2_recovered', 'm3_recovered', + 'e_k1', 'e_k2', 'e_k3', 'e_k4', + 'e_b1', 'e_b2', 'e_b3', 'e_F', + 'e_m1', 'e_m2', 'e_m3', + 'Amp1_rsqrd', 'Amp2_rsqrd', 'Amp3_rsqrd', + 'Phase1_rsqrd', 'Phase2_rsqrd', 'Phase3_rsqrd', '', 'trial time'] + + #Turn the final data arrays into a dataframe so they can be written to excel + dataframe_polar = pd.DataFrame(all_data1, columns=param_names) + dataframe_cart = pd.DataFrame(all_data2, columns=param_names) + 
dataframe_net = pd.DataFrame(all_data3, columns=param_names[:44] + param_names[-2:]) #cutting out the 6 r-squared columns because those values can only be found for the curvefits + + #Add _bar values to data frame (one value for the whole system) + dataframe_polar.at[0,'_bar'] = avg_e1_bar + dataframe_cart.at[0,'_bar'] = avg_e2_bar + dataframe_net.at[0,'_bar'] = avg_e3_bar + + #Add the mean time and std dev to the data frame (one value each for the whole system) + dataframe_polar.at[0,'mean trial time'] = mean_time_polar + dataframe_polar.at[0,'std dev trial time'] = std_dev_polar + dataframe_cart.at[0,'mean trial time'] = mean_time_cartesian + dataframe_cart.at[0,'std dev trial time'] = std_dev_cartesian + dataframe_net.at[0,'mean trial time'] = mean_time_NetMAP + dataframe_net.at[0,'std dev trial time'] = std_dev_NetMAP + + + #FOR ONLY when I'm running 1 trial per system: + # dataframe_polar.to_excel(writer, sheet_name='Amp & Phase', index=False) + # dataframe_cart.to_excel(writer, sheet_name='X & Y', index=False) + # dataframe_net.to_excel(writer, sheet_name='NetMAP', index=False) + + # return avg_e1_array, avg_e2_array, avg_e3_array, avg_e1_bar, avg_e2_bar, avg_e3_bar + + #For more than 1 trial per system: + return avg_e1_array, avg_e2_array, avg_e3_array, avg_e1_bar, avg_e2_bar, avg_e3_bar, dataframe_polar, dataframe_cart, dataframe_net + + +''' Begin work here. Case Study. +Randomly generate a system, then graph the data (no noise) and make a guess of parameters based on visual accuracy of the curve. +Use this guess to curvefit to the data. NetMAP does not require this initial guess to function.''' + +# #Make parameters/initial guesses - [k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3] +# #Note that right now we only scale/fix by F, so make sure to keep F correct in guesses +# true_params = generate_random_system() +# guessed_params = [1,1,1,1,1,1,1,1,1,1,1] + +# # Start the loop +# while True: +# # Graph +# plot_guess(guessed_params, true_params) + +# # Ask the user for the new list of guessed parameters +# print(f'Current list of parameter guesses is {guessed_params}') +# indices = input("Enter the indices of the elements you want to update (comma-separated, or 'c' to continue to curve fit): ") + +# # Check if the user wants to quit +# if indices.lower() == 'c': +# break + +# # Parse and validate the indices +# try: +# index_list = [int(idx.strip()) for idx in indices.split(',')] +# if any(index < 0 or index >= len(guessed_params) for index in index_list): +# print(f"Invalid indices. Please enter values between 0 and {len(guessed_params)-1}.") +# continue +# except ValueError: +# print("Invalid input. Please enter valid indices or 'c' to continue to curve fit.") +# continue + +# # Ask the user for the new values +# values = input(f"Enter the new values for indices {index_list} (comma-separated): ") + +# # Parse and validate the new values +# try: +# value_list = [float(value.strip()) for value in values.split(',')] +# if len(value_list) != len(index_list): +# print("The number of values must match the number of indices.") +# continue +# except ValueError: +# print("Invalid input. 
Please enter valid numbers.") +# continue + +# # Update the list with the new values +# for index, new_value in zip(index_list, value_list): +# guessed_params[index] = new_value + +# #Curve fit with the guess made above and get average lists +# #Will not do anything with _bar for a single case study +# freqs_NetMAP = np.linspace(0.001, 4, 10) +# freqs_curvefit = np.linspace(0.001, 4, 10) +# length_noise_NetMAP = 10 +# length_noise_curvefit = 10 +# avg_e1_array, avg_e2_array, avg_e3_array, avg_e1_bar, avg_e2_bar, avg_e3_bar = run_trials(true_params, guessed_params, freqs_NetMAP, freqs_curvefit, length_noise_NetMAP, length_noise_curvefit 10, 'Case_Study.xlsx', 'Case Study Plots') + +# #Graph histogram of for curve fits + +# plt.title('Average Systematic Error Across Parameters') +# plt.xlabel('') +# plt.ylabel('Counts') +# plt.hist(avg_e2_array, alpha=0.5, color='green', label='Cartesian (X & Y)', edgecolor='black') +# plt.hist(avg_e1_array, alpha=0.5, color='blue', label='Polar (Amp & Phase)', edgecolor='black') +# plt.hist(avg_e3_array, bins=50, alpha=0.5, color='red', label='NetMAP', edgecolor='black') +# plt.legend(loc='upper center') + +# plt.show() + +''' Begin work here. Automated guesses. Multiple systems. +Instead of manually guessing the intial parameters, guess is generated to be within a certain percentage of the true parameters. +Error across trials and across parameters is calculated. Error across parameters is graphed (e_bar) at the end to visualize error for all the systems on one graph.''' + +# avg_e1_bar_list = [] +# avg_e2_bar_list = [] +# avg_e3_bar_list = [] + +# for i in range(15): + +# #Generate system and guess parameters +# true_params = generate_random_system() +# guessed_params = automate_guess(true_params, 20) + +# #Curve fit with the guess made above +# freqs_NetMAP = np.linspace(0.001, 4, 10) +# length_noise = 10 +# avg_e1_array, avg_e2_array, avg_e3_array, avg_e1_bar, avg_e2_bar, avg_e3_bar = run_trials(true_params, guessed_params, freqs_NetMAP, length_noise, 50, f'Random_Automated_Guess_{i}.xlsx', f'Sys {i} - Rand Auto Guess Plots') + +# #Add _bar to lists to make one graph at the end +# avg_e1_bar_list.append(avg_e1_bar) #Polar +# avg_e2_bar_list.append(avg_e2_bar) #Cartesian +# avg_e3_bar_list.append(avg_e3_bar) #NetMAP + +# #Graph histogram of for curve fits +# fig = plt.figure(figsize=(10, 6)) +# plt.title('Average Systematic Error Across Parameters') +# plt.xlabel('') +# plt.ylabel('Counts') +# plt.hist(avg_e2_array, alpha=0.5, color='green', label='Cartesian (X & Y)', edgecolor='black') +# plt.hist(avg_e1_array, alpha=0.5, color='blue', label='Polar (Amp & Phase)', edgecolor='black') +# plt.hist(avg_e3_array, bins=50, alpha=0.5, color='red', label='NetMAP', edgecolor='black') +# plt.legend(loc='upper center') + +# plt.show() +# save_figure(fig, f'Sys {i} - Rand Auto Guess Plots', ' Histogram ' ) + +# #Graph histogram of _bar for both curve fits +# fig = plt.figure(figsize=(10, 6)) + +# # if max(avg_e2_bar_list) >= min(avg_e1_bar_list): +# plt.hist(avg_e2_bar_list, bins=10, alpha=0.75, color='green', label='Cartesian (X & Y)', edgecolor='black') +# plt.hist(avg_e1_bar_list, bins=10, alpha=0.75, color='blue', label='Polar (Amp & Phase)', edgecolor='black') +# plt.hist(avg_e3_bar_list, bins=10, alpha=0.75, color='red', label='NetMAP', edgecolor='black') +# plt.title('Average Error Across Parameters Then Across Trials') +# plt.xlabel(' (%)') +# plt.ylabel('Counts') +# plt.legend(loc='upper center') + +# plt.show() +# fig.savefig('_bar_Histogram.png') + +''' 
Begin work here. Checking Worst System - System 0 from 15 Systems - 10 Freqs NetMAP. +Running the system with no noise to understand why recovered error was so bad. +''' + +## System 0 from 15 Systems - 10 Freqs NetMAP +## Expecting there to be no error in recovery for everything +# true_parameters = [1.045, 0.179, 3.852, 1.877, 5.542, 1.956, 3.71, 1, 3.976, 0.656, 3.198] +# guessed_parameters = [1.2379, 0.1764, 3.7327, 1.8628, 5.93, 2.1793, 4.2198, 1, 4.3335, 0.7016, 3.0719] + +# #Run the trials with 0 error +# # MUST CHANGE ERROR IN run_trials AND IN get_parameters_NetMAP +# freqs_NetMAP = np.linspace(0.001, 4, 10) +# length_noise = 0 +# avg_e1_array, avg_e2_array, avg_e3_array, avg_e1_bar, avg_e2_bar, avg_e3_bar = run_trials(true_parameters, guessed_parameters, freqs_NetMAP, length_noise, 50, 'Sys0_No_Error.xlsx', 'Sys0_No_Error - Plots') + +# #Plot histogram +# plt.title('Average Systematic Error Across Parameters') +# plt.xlabel('') +# plt.ylabel('Counts') +# plt.hist(avg_e2_array, alpha=0.5, color='green', label='Cartesian (X & Y)', edgecolor='black') +# plt.hist(avg_e1_array, alpha=0.5, color='blue', label='Polar (Amp & Phase)', edgecolor='black') +# plt.hist(avg_e3_array, bins=50, alpha=0.5, color='red', label='NetMAP', edgecolor='black') +# plt.legend(loc='upper center') +# plt.show() +# plt.savefig('_Histogram_Sys0_no_error.png') + +'''Begin work here. Redoing Case Study - 10 Freqs Better Params with 1000 trials instead of 50 ''' +'''Additionally, I am going to use the same frequencies for all three methods of parameter recovery: + 300 or 10 evenly spaced frequencies from 0.001 to 4.''' +''' Note that all information saves to the same folder that this code is located in.''' + +# #Recover the system information from a file on my computer +# file_path = '/Users/Student/Desktop/Summer Research 2024/Curve Fit vs NetMAP/Case Study - 10 Freqs NetMAP & Better Parameters/Case_Study_10_Freqs_Better_Parameters.xlsx' +# array_amp_phase = pd.read_excel(file_path, sheet_name = 'Amp & Phase').to_numpy() +# array_X_Y = pd.read_excel(file_path, sheet_name = 'X & Y').to_numpy() + +# #These are the true and the guessed parameters for the system +# #Guessed parameters were the same ones guesssed by hand the first time we ran this case study +# true_params = np.concatenate((array_amp_phase[1,:7], [array_amp_phase[1,10]], array_amp_phase[1,7:10])) +# guessed_params = np.concatenate((array_amp_phase[1,11:18], [array_amp_phase[1,21]], array_amp_phase[1,18:21])) + +# #Create the frequencies that both NetMAP and the Curvefitting functions require +# #Note that if the number of frequencies are not the same, the noise must be adjusted +# # freq_curvefit = np.linspace(0.001, 4, 300) +# freq_curvefit = np.linspace(0.001, 4, 10) +# freqs_NetMAP = np.linspace(0.001, 4, 10) +# length_noise_curvefit = 10 +# length_noise_NetMAP = 10 + +# #Run the trials (1000 in this case) +# #Currently saves saves all plots to a folder called "Case Study 1000 Trials Same Frequencies Plots" +# #(the excel name is not used here - it is only required when doing multiple systems with one trial per system) +# #returns average error across trials (e_bar) and parameters (e), and dataframes for all three methods that include all the information +# #there is only one e_bar for each when doing a case study, so it will not be used +# #NOTE: error is different every time, to simulate a real experiment +# avg_e1_array, avg_e2_array, avg_e3_array, avg_e1_bar, avg_e2_bar, avg_e3_bar, dataframe_polar, dataframe_cart, dataframe_net = 
run_trials(true_params, guessed_params, freqs_NetMAP, freq_curvefit, length_noise_NetMAP, length_noise_curvefit, 1000, 'Second_Case_Study_1000_Trials_10_Frequencies.xlsx', 'Second Case Study 1000 Trials 10 Frequencies Plots') + +# #Save the new data to a new excel spreadsheet: +# with pd.ExcelWriter('Second_Case_Study_1000_Trials_10_Frequencies.xlsx', engine='xlsxwriter') as writer: +# dataframe_polar.to_excel(writer, sheet_name='Amp & Phase', index=False) +# dataframe_cart.to_excel(writer, sheet_name='X & Y', index=False) +# dataframe_net.to_excel(writer, sheet_name='NetMAP', index=False) + +# #Graph lin and log histograms of for both curve fits: + +# #Compute max of data and set the bin limits so all data is seen/included on graph +# data_max = max(avg_e1_array + avg_e2_array + avg_e3_array) +# if data_max > 39: +# linearbins = np.linspace(0, data_max + 2,50) +# else: +# linearbins = np.linspace(0, 40, 50) + +# #Graph linear plots +# fig = plt.figure(figsize=(5, 4)) +# plt.xlabel(' Bar (%)', fontsize = 16) +# plt.ylabel('Counts', fontsize = 16) +# plt.yticks(fontsize=14) +# plt.xticks(fontsize=14) +# plt.hist(avg_e1_array, bins = linearbins, alpha=0.5, color='blue', label='Polar', edgecolor='blue') +# plt.hist(avg_e2_array, bins = linearbins, alpha=0.5, color='green', label='Cartesian', edgecolor='green') +# plt.hist(avg_e3_array, bins = linearbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red') +# plt.legend(loc='best', fontsize = 13) + +# plt.show() +# save_figure(fig, 'Second Case Study 1000 Trials 10 Frequencies', 'Linear Histogram') + +# # Set the bin limits so all data is seen/included on graph +# if data_max > 100: +# logbins = np.logspace(-2, math.log10(data_max)+0.25, 50) +# else: +# logbins = np.logspace(-2, 1.8, 50) + +# #Graph log! +# fig = plt.figure(figsize=(5, 4)) +# plt.xlabel(' Bar (%)', fontsize = 16) +# plt.ylabel('Counts', fontsize = 16) +# plt.xscale('log') +# plt.yticks(fontsize=14) +# plt.xticks(fontsize=14) +# plt.hist(avg_e1_array, bins = logbins, alpha=0.5, color='blue', label='Polar', edgecolor='blue') +# plt.hist(avg_e2_array, bins = logbins, alpha=0.5, color='green', label='Cartesian', edgecolor='green') +# plt.hist(avg_e3_array, bins = logbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red') +# plt.legend(loc='best', fontsize = 13) + +# plt.show() +# save_figure(fig, 'Second Case Study 1000 Trials 10 Frequencies', 'Logarithmic Histogram') + + +'''Begin work here. Case Study - 10 Freqs Better Params with 1000 trials + GOAL: graph runtime versus number of frequencies given to each method. + Create a for loop that varies frequencies from 2 to 300. (2 because that is the minimum required by NetMAP. 
300 because that produces a very nice graph for curvefitting (and is what I have been using as a standard up until now.''' + +# #Recover the system information from a file on my computer +# file_path = '/Users/Student/Desktop/Summer Research 2024/Curve Fit vs NetMAP/Case Study - 10 Freqs NetMAP & Better Parameters/Case_Study_10_Freqs_Better_Parameters.xlsx' +# array_amp_phase = pd.read_excel(file_path, sheet_name = 'Amp & Phase').to_numpy() + +# #These are the true and the guessed parameters for the system +# #Guessed parameters were the same ones guesssed by hand the first time we ran this case study +# true_params = np.concatenate((array_amp_phase[1,:7], [array_amp_phase[1,10]], array_amp_phase[1,7:10])) +# guessed_params = np.concatenate((array_amp_phase[1,11:18], [array_amp_phase[1,21]], array_amp_phase[1,18:21])) + +# #create array to store the run times for the given number of frequencies +# #there will be a total of 98 different times since we start with 2 frequencies and end with 100 +# run_times_polar = np.zeros(99) +# run_times_cartesian = np.zeros(99) +# run_times_NetMAP = np.zeros(99) + +# #used for graphing (below for loop) +# num_freq = np.arange(2,101,1) #arange does not include the "stop" number, so the array goes from 2 to 100 + +# #loop to change which frequency is used to recover parameters +# for i in range(0,99): #range does not include the "stop" number, so the index actually goes up to 98 +# #Create the frequencies that both NetMAP and the Curvefitting functions require +# #Frequencies are values between 0.001 and 4, evenly spaced depending on how many frequencies we use +# #Note that the number of frequencies must match the length of the noise +# #minimum 2 frequencies required - max of 300 because that how high I was going before (gives a very good curve for curvefit) +# freq_curvefit = np.linspace(0.001, 4, i+2) +# freqs_NetMAP = np.linspace(0.001, 4, i+2) +# length_noise_curvefit = i+2 +# length_noise_NetMAP = i+2 + +# #Run the trials (1000 in this case) +# #Currently saves saves all plots to a folder called "Case Study 1000 Trials Varying Frequencies Plots" +# #(the excel name is not used here - it is only required when doing multiple systems with one trial per system) +# #returns average error across trials (e_bar) and parameters (e), and dataframes for all three methods that include all the information +# #there is only one e_bar for each when doing a case study, so those arrays will not be used in any graphing moving forward +# #NOTE: error is different every time, to simulate a real experiment +# avg_e1_array, avg_e2_array, avg_e3_array, avg_e1_bar, avg_e2_bar, avg_e3_bar, dataframe_polar, dataframe_cart, dataframe_net = run_trials(true_params, guessed_params, freqs_NetMAP, freq_curvefit, length_noise_NetMAP, length_noise_curvefit, 50, f'Second_Case_Study_50_Trials_{i+2}_Frequencies.xlsx', f'Second Case Study 50 Trials {i+2} Frequencies Plots') + +# #Save the new data to a new excel spreadsheet: +# with pd.ExcelWriter(f'Case_Study_50_Trials_{i+2}_Frequencies.xlsx', engine='xlsxwriter') as writer: +# dataframe_polar.to_excel(writer, sheet_name='Amp & Phase', index=False) +# dataframe_cart.to_excel(writer, sheet_name='X & Y', index=False) +# dataframe_net.to_excel(writer, sheet_name='NetMAP', index=False) + +# #The run times are stored in the dataframes, so we extract the mean here and add it to the run_times arrays so we can graph it later +# run_times_polar[i] = dataframe_polar.at[0,'mean trial time'] +# run_times_cartesian[i] = dataframe_cart.at[0,'mean 
trial time'] +# run_times_NetMAP[i] = dataframe_net .at[0,'mean trial time'] + +# print(f"Frequency {i+2} Complete") + +''' Graphing the above didn't work, so I'm doing it again below ''' + +run_times_polar = np.zeros(99) +run_times_cartesian = np.zeros(99) +run_times_NetMAP = np.zeros(99) +std_dev_time_polar = np.zeros(99) +std_dev_time_cartesian = np.zeros(99) +std_dev_time_NetMAP = np.zeros(99) +num_freq = np.arange(2,101,1) + +for i in range(99): + file_path = f'/Users/Student/Desktop/Summer Research 2024/Curve Fit vs NetMAP/Case Study - Number of Frequencies vs Average Run Time/50 Trials/Case_Study_50_Trials_{i+2}_Frequencies.xlsx' + polar = pd.read_excel(file_path, sheet_name = 'Amp & Phase').to_numpy() + cartesian = pd.read_excel(file_path, sheet_name = 'X & Y').to_numpy() + NetMAP = pd.read_excel(file_path, sheet_name = 'NetMAP').to_numpy() + + run_times_polar[i] = polar[0,53] + run_times_cartesian[i] = cartesian[0,53] + run_times_NetMAP[i] = NetMAP[0,47] + std_dev_time_polar[i] = polar[0,54] + std_dev_time_cartesian[i] = cartesian[0,54] + std_dev_time_NetMAP[i] = NetMAP[0,48] + + +#Plot number of frequencies versus run time: +fig = plt.figure(figsize=(5, 4)) +plt.xlabel('Number of Frequencies', fontsize = 16) +plt.ylabel('Mean Time to Run (s)', fontsize = 16) +plt.yticks(fontsize=14) +plt.xticks(fontsize=14) +plt.yscale('log') +plt.plot(num_freq, run_times_polar, 'o', color='blue', label='Polar') +plt.plot(num_freq, run_times_cartesian, 'o', color='green', label='Cartesian') +plt.plot(num_freq, run_times_NetMAP, 'o', color='red', label='NetMAP') +plt.legend(loc='best', fontsize = 13) +plt.show() + + +fig = plt.figure(figsize=(5, 4)) +plt.xlabel('Number of Frequencies', fontsize = 16) +plt.ylabel('Mean Time to Run (s)', fontsize = 16) +plt.yticks(fontsize=14) +plt.xticks(fontsize=14) +plt.yscale('log') +plt.plot(num_freq, run_times_polar, 'o', color='blue', label='Polar') +plt.legend(loc='best', fontsize = 13) +plt.show() + +fig = plt.figure(figsize=(5, 4)) +plt.xlabel('Number of Frequencies', fontsize = 16) +plt.ylabel('Mean Time to Run (s)', fontsize = 16) +plt.yticks(fontsize=14) +plt.xticks(fontsize=14) +plt.yscale('log') +plt.plot(num_freq, run_times_cartesian, 'o', color='green', label='Cartesian') +plt.legend(loc='best', fontsize = 13) +plt.show() + +fig = plt.figure(figsize=(5, 4)) +plt.xlabel('Number of Frequencies', fontsize = 16) +plt.ylabel('Mean Time to Run (s)', fontsize = 16) +plt.yticks(fontsize=14) +plt.xticks(fontsize=14) +plt.plot(num_freq, run_times_NetMAP, 'o', color='red', label='NetMAP') +plt.legend(loc='best', fontsize = 13) +plt.show() + + +# polar_outliers = run_times_polar[run_times_polar > 20] +# cartesian_outliers = run_times_cartesian[run_times_cartesian > 20] +# polar_outlier_indices = np.nonzero(run_times_polar > 20) +# cartesian_outlier_indices = np.nonzero(run_times_cartesian > 20) + +# no_outliers_polar_times = np.empty +# no_outliers_cartesian_times = np.empty +# new_freq_polar = np.empty + +# for i in range(len(run_times_polar)): +# if run_times_polar[i] not in polar_outliers: +# no_outliers_polar_times[i] = run_times_polar[i] +# if run_times_cartesian[i] not in cartesian_outliers: +# no_outliers_cartesian_times[i] = run_times_cartesian[i] +# if i not in polar_outlier_indices: + + + + +'''Begin work here. Redoing 15 systems data. Still using 10 Freqs and Better Params. + I want to run parameter recovery for many more systems but only 1 trial per system. + Seeing how many systems it can do in 2 hours or 2000 systems.''' + + +## 1. 
What am I doing for error? + ## 300 frequencies (n=300 -- so 300 different noises for each frequency used) and noise level 2 + ## 10 evenly spaced frequencies for NetMAP (n=10) and noise level 2. +## 2. Set a runtime limit of 2-3 hours. DONE +## 3. Don't graph all the curvefits. DONE +## 4. Guesses are automated to within 20% of generated parameters, 10 evenly spaced frequencies for NetMAP + + +# # Set the time limit in seconds +# time_limit = 14400 # 4 hours + +# # Record the start time +# start_time = time.time() + +# # Compile a list of all the e bars so we can graph at the end +# avg_e_bar_list_polar = [] +# avg_e_bar_list_cartesian = [] +# avg_e_bar_list_NetMAP = [] + +# # Initialize an array so I can put each system into one spreadsheet since I'm only doing one trial per system +# all_data1 = pd.DataFrame() #Polar +# all_data2 = pd.DataFrame() #Cartesian +# all_data3 = pd.DataFrame() #NetMAP + +# for i in range(2000): + +# # Check if the time limit has been exceeded +# elapsed_time = time.time() - start_time +# if elapsed_time > time_limit: +# print("Time limit exceeded. Exiting loop.") +# break + +# loop_start_time = time.time() + +# #Generate system and guess parameters +# true_params = generate_random_system() +# guessed_params = automate_guess(true_params, 20) + +# #Curve fit with the guess made above +# freqs_NetMAP = np.linspace(0.001, 4, 10) +# length_noise = 10 +# avg_e1_array, avg_e2_array, avg_e3_array, avg_e1_bar, avg_e2_bar, avg_e3_bar, dataframe_polar, dataframe_cart, dataframe_net = run_trials(true_params, guessed_params, freqs_NetMAP, length_noise, 1, f'System_{i+1}_1.xlsx', f'Sys {i+1} - Rand Auto Guess Plots') + +# #Add each system data to one big dataframe so I can store everything in the same spreadsheet + +# all_data1 = pd.concat([all_data1, dataframe_polar], ignore_index=True) +# all_data2 = pd.concat([all_data2, dataframe_cart], ignore_index=True) +# all_data3 = pd.concat([all_data3, dataframe_net], ignore_index=True) + +# #Add _bar to lists to make one graph at the end +# avg_e_bar_list_polar.append(avg_e1_bar) #Polar +# avg_e_bar_list_cartesian.append(avg_e2_bar) #Cartesian +# avg_e_bar_list_NetMAP.append(avg_e3_bar) #NetMAP + +# ## FOR NOW - don't need this either + +# # # Compute max of data and set the bin limits so all data is included on graph +# # data_max1 = max(avg_e2_array + avg_e1_array + avg_e3_array) +# # if data_max1 > 39: +# # linearbins = np.linspace(0, data_max1 + 2,50) +# # else: +# # linearbins = np.linspace(0, 40, 50) + +# # #Graph histogram of for curve fits - linear +# # fig = plt.figure(figsize=(5, 4)) +# # # plt.title('Average Systematic Error Across Parameters') +# # plt.xlabel(' (%)', fontsize = 16) +# # plt.ylabel('Counts', fontsize = 16) +# # plt.yticks(fontsize=14) +# # plt.xticks(fontsize=14) +# # plt.hist(avg_e2_array, bins = linearbins, alpha=0.5, color='green', label='Cartesian (X & Y)', edgecolor='green') +# # plt.hist(avg_e1_array, bins = linearbins, alpha=0.5, color='blue', label='Polar (Amp & Phase)', edgecolor='blue') +# # plt.hist(avg_e3_array, bins = linearbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red') +# # plt.legend(loc='best', fontsize = 13) + +# # # plt.show() +# # save_figure(fig, 'More Systems 1 Trial - Histograms', f' Lin Hist System {i+1}') + +# # # Set the bin limits so all data is included on graph +# # if data_max > 100: +# # logbins = np.logspace(-2, math.log10(data_max), 50) +# # else: +# # logbins = np.logspace(-2, 1.8, 50) +# # #Graph histogram of for curve fits - log +# # fig = 
plt.figure(figsize=(5, 4)) +# # # plt.title('Average Systematic Error Across Parameters') +# # plt.xlabel(' (%)', fontsize = 16) +# # plt.ylabel('Counts', fontsize = 16) +# # plt.xscale('log') +# # plt.yticks(fontsize=14) +# # plt.xticks(fontsize=14) +# # plt.hist(avg_e2_array, bins = logbins, alpha=0.5, color='green', label='Cartesian (X & Y)', edgecolor='green') +# # plt.hist(avg_e1_array, bins = logbins, alpha=0.5, color='blue', label='Polar (Amp & Phase)', edgecolor='blue') +# # plt.hist(avg_e3_array, bins = logbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red') +# # plt.legend(loc='best', fontsize = 13) + +# # # plt.show() +# # save_figure(fig, 'More Systems 1 Trial - Histograms', f' Log Hist System {i+1}') + +# loop_end_time = time.time() +# loop_time = loop_end_time - loop_start_time + +# print(f"Iteration {i + 1} completed. Loop time: {loop_time} secs ") + +# #Write the data for each system (which is now in one big dataframe) to excel +# with pd.ExcelWriter('All_Systems_1_Trial_2.xlsx') as writer: +# all_data1.to_excel(writer, sheet_name='Polar', index=False) +# all_data2.to_excel(writer, sheet_name='Cartesian', index=False) +# all_data3.to_excel(writer, sheet_name='NetMAP', index=False) + + +# #Graph histogram of _bar for both curve fits + +# # Compute max of data and set the bin limits so all data is included on graph +# data_max = max(avg_e_bar_list_cartesian + avg_e_bar_list_polar + avg_e_bar_list_NetMAP) +# if data_max > 39: +# linearbins = np.linspace(0, data_max + 2,50) +# else: +# linearbins = np.linspace(0, 40, 50) + +# #Graph linear! +# fig = plt.figure(figsize=(5, 4)) +# plt.xlabel(' Bar (%)', fontsize = 16) +# plt.ylabel('Counts', fontsize = 16) +# plt.yticks(fontsize=14) +# plt.xticks(fontsize=14) +# plt.hist(avg_e_bar_list_cartesian, bins = linearbins, alpha=0.5, color='green', label='Cartesian', edgecolor='green') +# plt.hist(avg_e_bar_list_polar, bins = linearbins, alpha=0.5, color='blue', label='Polar', edgecolor='blue') +# plt.hist(avg_e_bar_list_NetMAP, bins = linearbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red') +# plt.legend(loc='best', fontsize = 13) + +# plt.show() +# save_figure(fig, 'More Systems 1 Trial - Histograms', ' Bar Lin Hist 2' ) + +# # Set the bin limits so all data is included on graph +# if data_max > 100: +# logbins = np.logspace(-2, math.log10(data_max)+0.25, 50) +# else: +# logbins = np.logspace(-2, 1.8, 50) + +# #Graph log! 
+# fig = plt.figure(figsize=(5, 4))
+# plt.xlabel(' Bar (%)', fontsize = 16)
+# plt.ylabel('Counts', fontsize = 16)
+# plt.xscale('log')
+# plt.yticks(fontsize=14)
+# plt.xticks(fontsize=14)
+# plt.hist(avg_e_bar_list_cartesian, bins = logbins, alpha=0.5, color='green', label='Cartesian', edgecolor='green')
+# plt.hist(avg_e_bar_list_polar, bins = logbins, alpha=0.5, color='blue', label='Polar', edgecolor='blue')
+# plt.hist(avg_e_bar_list_NetMAP, bins = logbins, alpha=0.5, color='red', label='NetMAP', edgecolor='red')
+# plt.legend(loc='best', fontsize = 13)
+
+# plt.show()
+# save_figure(fig, 'More Systems 1 Trial - Histograms', ' Bar Log Hist 2' )
+
+# # End time
+# end_time = time.time()
+# print(f"Time Elapsed: {end_time - start_time} secs -- {(end_time - start_time)/3600} hrs")
+
diff --git a/trimer/curve_fitting_X_Y_all.py b/trimer/curve_fitting_X_Y_all.py
new file mode 100644
index 0000000..d902c25
--- /dev/null
+++ b/trimer/curve_fitting_X_Y_all.py
@@ -0,0 +1,293 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Jul 16 11:31:59 2024
+
+@author: lydiabullock
+"""
+import os
+import numpy as np
+import matplotlib.pyplot as plt
+import lmfit
+import warnings
+from Trimer_simulator import re1, re2, re3, im1, im2, im3, realamp1, realamp2, realamp3, imamp1, imamp2, imamp3
+
+''' 5 functions contained:
+    multiple_fit_X_Y - Curve fits to multiple Real and Imaginary Curves at once
+                     - Calculates systematic error and returns an array of info
+                     - Graphs curve fit analysis
+    residuals - calculates residuals of multiple data sets and concatenates them
+              - used in the multiple_fit_X_Y function to minimize the residuals of
+                multiple graphs at the same time to find the best fit curve
+    save_figure - saves the curve fit graph created to a named folder
+    syserr - calculates systematic error
+    rsqrd - calculates R^2
+'''
+
+def syserr(x_found, x_set, absval = True):
+    with warnings.catch_warnings():
+        warnings.simplefilter('ignore')
+        se = 100*(x_found-x_set)/x_set
+    if absval:
+        return abs(se)
+    else:
+        return se
+
+"""
+This definition of R^2 can come out negative.
+Negative means that a flat line would fit the data better than the curve.
+""" +def rsqrd(model, data, plot=False, x=None, newfigure = True): + SSres = sum((data - model)**2) + SStot = sum((data - np.mean(data))**2) + rsqrd = 1 - (SSres/ SStot) + + if plot: + if newfigure: + plt.figure() + plt.plot(x,data, 'o') + plt.plot(x, model, '--') + + return rsqrd + +#Get residuals +def residuals(params, wd, X1_data, X2_data, X3_data, Y1_data, Y2_data, Y3_data): + k1 = params['k1'].value + k2 = params['k2'].value + k3 = params['k3'].value + k4 = params['k4'].value + b1 = params['b1'].value + b2 = params['b2'].value + b3 = params['b3'].value + F = params['F'].value + m1 = params['m1'].value + m2 = params['m2'].value + m3 = params['m3'].value + + modelre1 = re1(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + modelre2 = re2(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + modelre3 = re3(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + modelim1 = im1(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + modelim2 = im2(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + modelim3 = im3(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) + + residX1 = X1_data - modelre1 + residX2 = X2_data - modelre2 + residX3 = X3_data - modelre3 + residY1 = Y1_data - modelim1 + residY2 = Y2_data - modelim2 + residY3 = Y3_data - modelim3 + + return np.concatenate((residX1, residX2, residX3, residY1, residY2, residY3)) + +def save_figure(figure, folder_name, file_name): + # Create the folder if it does not exist + if not os.path.exists(folder_name): + os.makedirs(folder_name) + + # Save the figure to the folder + file_path = os.path.join(folder_name, file_name) + figure.savefig(file_path) + plt.close(figure) + +#Takes in a *list* of correct parameters and a *list* of the guessed parameters, +#as well as error and three booleans (whether you want to apply force to one or all masses, +#scale by force, or fix the force) +# +#Returns a dataframe containing guessed parameters, recovered parameters, +#and systematic error +def multiple_fit_X_Y(params_guess, params_correct, e, freq, force_all, fix_F, graph_folder_name, graph_name, show_curvefit_graphs = False): + + ##Put params_guess and params_correct into np array + #Order added: k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3 + data_array = np.zeros(52) #50 elements are generated in this code, but I leave the last entry empty because I want to time how long it takes the function to run in other code, so I'm giving the array space to add the time if necessary + data_array[:11] += np.array(params_correct) + data_array[11:22] += np.array(params_guess) + + + ##Create data - functions from simulator code + + X1 = realamp1(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], data_array[10], e, force_all) + Y1 = imamp1(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], data_array[10], e, force_all) + + X2 = realamp2(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], data_array[10], e, force_all) + Y2 = imamp2(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], data_array[10], e, force_all) + + X3 = realamp3(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], 
data_array[10], e, force_all) + Y3 = imamp3(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], data_array[10], e, force_all) + + #Create intial parameters + params = lmfit.Parameters() + params.add('k1', value = data_array[11], min=0) + params.add('k2', value = data_array[12], min=0) + params.add('k3', value = data_array[13], min=0) + params.add('k4', value = data_array[14], min=0) + params.add('b1', value = data_array[15], min=0) + params.add('b2', value = data_array[16], min=0) + params.add('b3', value = data_array[17], min=0) + params.add('F', value = data_array[18], min=0) + params.add('m1', value = data_array[19], min=0) + params.add('m2', value = data_array[20], min=0) + params.add('m3', value = data_array[21], min=0) + + #If you plan on fixing F so it cannot be changed + if fix_F: + params['F'].vary = False + + + #get resulting data and fit parameters by minimizing the residuals + result = lmfit.minimize(residuals, params, args = (freq, X1, X2, X3, Y1, Y2, Y3)) + #print(lmfit.fit_report(result)) + + ##Add recovered parameters and systematic error + #Order added: k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3 + param_values = np.array([result.params[param].value for param in result.params]) + data_array[22:33] += param_values + + if fix_F == False: + scaling_factor = (data_array[7])/(result.params['F'].value) + data_array[22:33] *= scaling_factor + + syserr_result = syserr(data_array[22:33], data_array[:11]) + data_array[33:44] += np.array(syserr_result) + + #average error + data_array[-1] += np.sum(data_array[33:44]/10) #dividing by 10 because we aren't counting the error in Force because it is 0 + + #Create fitted y-values (for rsqrd and graphing) + k1_fit = data_array[22] + k2_fit = data_array[23] + k3_fit = data_array[24] + k4_fit = data_array[25] + b1_fit = data_array[26] + b2_fit = data_array[27] + b3_fit = data_array[28] + F_fit = data_array[29] + m1_fit = data_array[30] + m2_fit = data_array[31] + m3_fit= data_array[32] + + X1_fitted = re1(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) + X2_fitted = re2(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) + X3_fitted = re3(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) + Y1_fitted = im1(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) + Y2_fitted = im2(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) + Y3_fitted = im3(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) + + #Calculate R^2 and add to data_array + X1_rsqrd = rsqrd(X1_fitted, X1) + X2_rsqrd = rsqrd(X2_fitted, X2) + X3_rsqrd = rsqrd(X3_fitted, X3) + Y1_rsqrd = rsqrd(Y1_fitted, Y1) + Y2_rsqrd = rsqrd(Y2_fitted, Y2) + Y3_rsqrd = rsqrd(Y3_fitted, Y3) + + data_array[44:50] += np.array([X1_rsqrd, X2_rsqrd, X3_rsqrd, Y1_rsqrd, Y2_rsqrd, Y3_rsqrd]) + + if show_curvefit_graphs == True: + #Create intial guessed y-values (for graphing) + k1_guess = data_array[11] + k2_guess = data_array[12] + k3_guess = data_array[13] + k4_guess = data_array[14] + b1_guess = data_array[15] + b2_guess = data_array[16] + b3_guess = data_array[17] + F_guess = data_array[18] + m1_guess = data_array[19] + m2_guess = data_array[20] + m3_guess = data_array[21] + + re1_guess = re1(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, 
b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) + re2_guess = re2(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) + re3_guess = re3(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) + im1_guess = im1(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) + im2_guess = im2(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) + im3_guess = im3(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) + + ## Begin graphing + fig = plt.figure(figsize=(16,11)) + gs = fig.add_gridspec(3, 3, hspace=0.25, wspace=0.05) + + ax1 = fig.add_subplot(gs[0, 0]) + ax2 = fig.add_subplot(gs[0, 1], sharex=ax1, sharey=ax1) + ax3 = fig.add_subplot(gs[0, 2], sharex=ax1, sharey=ax1) + ax4 = fig.add_subplot(gs[1, 0], sharex=ax1) + ax5 = fig.add_subplot(gs[1, 1], sharex=ax1, sharey=ax4) + ax6 = fig.add_subplot(gs[1, 2], sharex=ax1, sharey=ax4) + ax7 = fig.add_subplot(gs[2, 0], aspect='equal') + ax8 = fig.add_subplot(gs[2, 1], sharex=ax7, sharey=ax7, aspect='equal') + ax9 = fig.add_subplot(gs[2, 2], sharex=ax7, sharey=ax7, aspect='equal') + + #original data + ax1.plot(freq, X1,'ro', alpha=0.5, markersize=5.5, label = 'Data') + ax2.plot(freq, X2,'bo', alpha=0.5, markersize=5.5, label = 'Data') + ax3.plot(freq, X3,'go', alpha=0.5, markersize=5.5, label = 'Data') + ax4.plot(freq, Y1,'ro', alpha=0.5, markersize=5.5, label = 'Data') + ax5.plot(freq, Y2,'bo', alpha=0.5, markersize=5.5, label = 'Data') + ax6.plot(freq, Y3,'go', alpha=0.5, markersize=5.5, label = 'Data') + ax7.plot(X1,Y1,'ro', alpha=0.5, markersize=5.5, label = 'Data') + ax8.plot(X2,Y2,'bo', alpha=0.5, markersize=5.5, label = 'Data') + ax9.plot(X3,Y3,'go', alpha=0.5, markersize=5.5, label = 'Data') + + #fitted curves + ax1.plot(freq, X1_fitted,'c-', label='Best Fit', lw=2.5) + ax2.plot(freq, X2_fitted,'r-', label='Best Fit', lw=2.5) + ax3.plot(freq, X3_fitted,'m-', label='Best Fit', lw=2.5) + ax4.plot(freq, Y1_fitted,'c-', label='Best Fit', lw=2.5) + ax5.plot(freq, Y2_fitted,'r-', label='Best Fit', lw=2.5) + ax6.plot(freq, Y3_fitted,'m-', label='Best Fit', lw=2.5) + ax7.plot(X1_fitted, Y1_fitted, 'c-', label='Best Fit', lw=2.5) + ax8.plot(X2_fitted, Y2_fitted, 'r-', label='Best Fit', lw=2.5) + ax9.plot(X3_fitted, Y3_fitted, 'm-', label='Best Fit', lw=2.5) + + #inital guess curves + ax1.plot(freq, re1_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax2.plot(freq, re2_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax3.plot(freq, re3_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax4.plot(freq, im1_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax5.plot(freq, im2_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax6.plot(freq, im3_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax7.plot(re1_guess, im1_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax8.plot(re2_guess, im2_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + ax9.plot(re3_guess, im3_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') + + #Graph parts + fig.suptitle('Trimer Resonator: Real and Imaginary', fontsize=16) + ax1.set_title('Mass 1', fontsize=14) + ax2.set_title('Mass 2', fontsize=14) + 
ax3.set_title('Mass 3', fontsize=14)
+        ax1.set_ylabel('Real')
+        ax4.set_ylabel('Imaginary')
+        ax7.set_ylabel('Imaginary')
+
+        ax1.label_outer()
+        ax2.label_outer()
+        ax3.label_outer()
+        ax5.tick_params(labelleft=False)
+        ax6.tick_params(labelleft=False)
+        ax7.label_outer()
+        ax8.label_outer()
+        ax9.label_outer()
+
+        ax4.set_xlabel('Frequency')
+        ax5.set_xlabel('Frequency')
+        ax6.set_xlabel('Frequency')
+        ax7.set_xlabel('Real')
+        ax8.set_xlabel('Real')
+        ax9.set_xlabel('Real')
+
+        ax1.legend()
+        ax2.legend()
+        ax3.legend()
+        ax4.legend()
+        ax5.legend()
+        ax6.legend()
+        ax7.legend(fontsize='10')
+        ax8.legend(fontsize='10')
+        ax9.legend(fontsize='10')
+
+        plt.show()
+        save_figure(fig, graph_folder_name, graph_name)
+
+    return data_array
\ No newline at end of file
diff --git a/trimer/curve_fitting_amp_phase_all.py b/trimer/curve_fitting_amp_phase_all.py
new file mode 100644
index 0000000..37158fe
--- /dev/null
+++ b/trimer/curve_fitting_amp_phase_all.py
@@ -0,0 +1 @@
+#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Jul 16 11:31:59 2024 @author: lydiabullock """ import os import numpy as np import matplotlib.pyplot as plt import lmfit import warnings from Trimer_simulator import curve1, theta1, curve2, theta2, curve3, theta3, c1, t1, c2, t2, c3, t3 ''' 5 functions contained: multiple_fit_amp_phase - Curve fits to multiple Amplitude and Phase Curves at once - Calculates systematic error and returns an array of results - Graphs curve fit analysis residuals - calculates residuals of multiple data sets and concatenates them - used in the multiple_fit_amp_phase function to minimize the residuals of multiple graphs at the same time to find the best fit curve save_figure - saves the curve fit graph created to a named folder syserr - calculates systematic error rsqrd - calculates R^2 ''' def syserr(x_found, x_set, absval = True): with warnings.catch_warnings(): warnings.simplefilter('ignore') se = 100*(x_found-x_set)/x_set if absval: return abs(se) else: return se """ This definition of R^2 can come out negative. Negative means that a flat line would fit the data better than the curve.
""" def rsqrd(model, data, plot=False, x=None, newfigure = True): SSres = sum((data - model)**2) SStot = sum((data - np.mean(data))**2) rsqrd = 1 - (SSres/ SStot) if plot: if newfigure: plt.figure() plt.plot(x,data, 'o') plt.plot(x, model, '--') return rsqrd #Calculates and concatenates residuals given multiple data sets #Takes in parameters, frequency, and dependent variables def residuals(params, wd, Amp1_data, Amp2_data, Amp3_data, Phase1_data, Phase2_data, Phase3_data, scaled): k1 = params['k1'].value k2 = params['k2'].value k3 = params['k3'].value k4 = params['k4'].value b1 = params['b1'].value b2 = params['b2'].value b3 = params['b3'].value F = params['F'].value m1 = params['m1'].value m2 = params['m2'].value m3 = params['m3'].value modelc1 = c1(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) modelc2 = c2(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) modelc3 = c3(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) modelt1 = t1(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) modelt2 = t2(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) modelt3 = t3(wd, k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3) residc1 = Amp1_data - modelc1 residc2 = Amp2_data - modelc2 residc3 = Amp3_data - modelc3 residt1 = Phase1_data - modelt1 residt2 = Phase2_data - modelt2 residt3 = Phase3_data - modelt3 #Trying to scale Amp and Phase because their units are different amp_max = max([max(residc1), max(residc2), max(residc3)]) phase_max = max([max(residt1), max(residt2), max(residt3)]) scaled_residc1 = [] scaled_residc2 = [] scaled_residc3 = [] scaled_residt1 = [] scaled_residt2 = [] scaled_residt3 = [] for amp1, amp2, amp3 in zip(residc1, residc2, residc3): scaled_residc1.append(amp1/amp_max) scaled_residc2.append(amp2/amp_max) scaled_residc3.append(amp3/amp_max) for phase1, phase2, phase3 in zip(residt1, residt2, residt3): scaled_residt1.append(phase1/phase_max) scaled_residt2.append(phase2/phase_max) scaled_residt3.append(phase3/phase_max) if scaled: return np.concatenate((scaled_residc1, scaled_residc2, scaled_residc3, scaled_residt1, scaled_residt2, scaled_residt3)) else: return np.concatenate((residc1, residc2, residc3, residt1, residt2, residt3)) def save_figure(figure, folder_name, file_name): # Create the folder if it does not exist if not os.path.exists(folder_name): os.makedirs(folder_name) # Save the figure to the folder file_path = os.path.join(folder_name, file_name) figure.savefig(file_path) #Takes in a *list* of correct parameters and a *list* of the guessed parameters, #as well as error and three booleans (whether you want to apply force to one or all masses, #scale by force, or fix the force) # #Returns a dataframe containing guessed parameters, recovered parameters, #and systematic error def multiple_fit_amp_phase(params_guess, params_correct, e, freq, force_all, fix_F, scaled, graph_folder_name, graph_name, show_curvefit_graphs = False): ##Put params_guess and params_correct into np array #Order added: k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3 data_array = np.zeros(52) #50 elements are generated in this code, but I leave the last entry empty because I want to time how long it takes the function to run in other code, so I'm giving the array space to add the time if necessary data_array[:11] += np.array(params_correct) data_array[11:22] += np.array(params_guess) ##Create data - functions from simulator code Amp1 = curve1(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], data_array[10], e, force_all) 
Phase1 = theta1(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], data_array[10], e, force_all) \ + 2 * np.pi Amp2 = curve2(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], data_array[10], e, force_all) Phase2 = theta2(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], data_array[10], e, force_all) \ + 2 * np.pi Amp3 = curve3(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], data_array[10], e, force_all) Phase3 = theta3(freq, data_array[0], data_array[1], data_array[2], data_array[3], data_array[4], data_array[5], data_array[6], data_array[7], data_array[8], data_array[9], data_array[10], e, force_all) \ + 2 * np.pi #Create intial parameters params = lmfit.Parameters() params.add('k1', value = data_array[11], min=0) params.add('k2', value = data_array[12], min=0) params.add('k3', value = data_array[13], min=0) params.add('k4', value = data_array[14], min=0) params.add('b1', value = data_array[15], min=0) params.add('b2', value = data_array[16], min=0) params.add('b3', value = data_array[17], min=0) params.add('F', value = data_array[18], min=0) params.add('m1', value = data_array[19], min=0) params.add('m2', value = data_array[20], min=0) params.add('m3', value = data_array[21], min=0) #If you plan on fixing F so it cannot be changed if fix_F: params['F'].vary = False #get resulting data and fit parameters by minimizing the residuals result = lmfit.minimize(residuals, params, args = (freq, Amp1, Amp2, Amp3, Phase1, Phase2, Phase3, scaled)) #print(lmfit.fit_report(result)) ##Add recovered parameters and systematic error #Order added: k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3 param_values = np.array([result.params[param].value for param in result.params]) data_array[22:33] += param_values if fix_F == False: scaling_factor = (data_array[7])/(result.params['F'].value) data_array[22:33] *= scaling_factor syserr_result = syserr(data_array[22:33], data_array[:11]) data_array[33:44] += np.array(syserr_result) #average error data_array[-1] += np.sum(data_array[33:44]/10) #dividing by 10 because we aren't counting the error in Force because it is 0 #Create fitted y-values (for rsqrd and graphing) k1_fit = data_array[22] k2_fit = data_array[23] k3_fit = data_array[24] k4_fit = data_array[25] b1_fit = data_array[26] b2_fit = data_array[27] b3_fit = data_array[28] F_fit = data_array[29] m1_fit = data_array[30] m2_fit = data_array[31] m3_fit= data_array[32] Amp1_fitted = c1(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) Amp2_fitted = c2(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) Amp3_fitted = c3(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) Phase1_fitted = t1(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) Phase2_fitted = t2(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) Phase3_fitted = t3(freq, k1_fit, k2_fit, k3_fit, k4_fit, b1_fit, b2_fit, b3_fit, F_fit, m1_fit, m2_fit, m3_fit) #Calculate R^2 and add to data_array Amp1_rsqrd = rsqrd(Amp1_fitted, Amp1) 
Amp2_rsqrd = rsqrd(Amp2_fitted, Amp2) Amp3_rsqrd = rsqrd(Amp3_fitted, Amp3) Phase1_rsqrd = rsqrd(Phase1_fitted, Phase1) Phase2_rsqrd = rsqrd(Phase2_fitted, Phase2) Phase3_rsqrd = rsqrd(Phase3_fitted, Phase3) data_array[44:50] += np.array([Amp1_rsqrd, Amp2_rsqrd, Amp3_rsqrd, Phase1_rsqrd, Phase2_rsqrd, Phase3_rsqrd]) if show_curvefit_graphs == True: #Create intial guessed y-values (for graphing) k1_guess = data_array[11] k2_guess = data_array[12] k3_guess = data_array[13] k4_guess = data_array[14] b1_guess = data_array[15] b2_guess = data_array[16] b3_guess = data_array[17] F_guess = data_array[18] m1_guess = data_array[19] m2_guess = data_array[20] m3_guess = data_array[21] c1_guess = c1(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) c2_guess = c2(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) c3_guess = c3(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) t1_guess = t1(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) t2_guess = t2(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) t3_guess = t3(freq, k1_guess, k2_guess, k3_guess, k4_guess, b1_guess, b2_guess, b3_guess, F_guess, m1_guess, m2_guess, m3_guess) ## Begin graphing fig = plt.figure(figsize=(16,8)) gs = fig.add_gridspec(2, 3, hspace=0.1, wspace=0.1) ((ax1, ax2, ax3), (ax4, ax5, ax6)) = gs.subplots(sharex=True, sharey='row') #original data ax1.plot(freq, Amp1,'ro', alpha=0.5, markersize=5.5, label = 'Data') ax2.plot(freq, Amp2,'bo', alpha=0.5, markersize=5.5, label = 'Data') ax3.plot(freq, Amp3,'go', alpha=0.5, markersize=5.5, label = 'Data') ax4.plot(freq, Phase1,'ro', alpha=0.5, markersize=5.5, label = 'Data') ax5.plot(freq, Phase2,'bo', alpha=0.5, markersize=5.5, label = 'Data') ax6.plot(freq, Phase3,'go', alpha=0.5, markersize=5.5, label = 'Data') #fitted curves ax1.plot(freq, Amp1_fitted,'c-', label='Best Fit', lw=2.5) ax2.plot(freq, Amp2_fitted,'r-', label='Best Fit', lw=2.5) ax3.plot(freq, Amp3_fitted,'m-', label='Best Fit', lw=2.5) ax4.plot(freq, Phase1_fitted,'c-', label='Best Fit', lw=2.5) ax5.plot(freq, Phase2_fitted,'r-', label='Best Fit', lw=2.5) ax6.plot(freq, Phase3_fitted,'m-', label='Best Fit', lw=2.5) #inital guess curves ax1.plot(freq, c1_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') ax2.plot(freq, c2_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') ax3.plot(freq, c3_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') ax4.plot(freq, t1_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') ax5.plot(freq, t2_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') ax6.plot(freq, t3_guess, color='#4682B4', linestyle='dashed', label='Initial Guess') #Graph parts if scaled: fig.suptitle('Trimer Resonator: Amplitude and Phase (Scaled)', fontsize=16) else: fig.suptitle('Trimer Resonator: Amplitude and Phase (Not Scaled)', fontsize=16) ax1.set_title('Mass 1', fontsize=14) ax2.set_title('Mass 2', fontsize=14) ax3.set_title('Mass 3', fontsize=14) ax1.set_ylabel('Amplitude') ax4.set_ylabel('Phase') for ax in fig.get_axes(): ax.set(xlabel='Frequency') ax.label_outer() ax.legend() plt.show() save_figure(fig, graph_folder_name, graph_name) return data_array '''Begin Work - Does scaling the residuals change anything?''' 
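# --- Editorial aside (illustrative sketch, not part of the committed file) ---
# The residuals() function above balances the two measurement types by dividing the
# amplitude residual block and the phase residual block by their respective maxima
# before lmfit minimizes the concatenated vector. A minimal standalone version of
# that idea, with made-up residual arrays:
import numpy as np

amp_resid = np.array([0.02, 0.15, 0.40])     # illustrative amplitude residuals (m)
phase_resid = np.array([0.01, 0.30, 1.20])   # illustrative phase residuals (rad)
scaled = np.concatenate((amp_resid / amp_resid.max(),
                         phase_resid / phase_resid.max()))
print(scaled)  # each block now peaks at 1, so neither unit dominates the fit
# --- End editorial aside ---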
#import pandas as pd # e = 0 # force_all = False # fix_F = False # freq = np.linspace(0.001, 4, 10) #this is using System 7 of 15 Systems - 10 Freqs NetMAP Better Params # params_correct = [1.427, 6.472, 3.945, 3.024, 0.675, 0.801, 0.191, 1, 7.665, 9.161, 7.139] # params_guess = [1.1942, 5.4801, 3.2698, 3.3004, 0.7682, 0.8185, 0.1765, 1, 7.4923, 8.9932, 8.1035] #Get the data (and the graphs) params_guess, params_correct, e, freq, force_all, fix_F, scaled, graph_folder_name, graph_name, show_curvefit_graphs = False # scaled_dict = multiple_fit_amp_phase(params_guess, params_correct, e, freq, force_all, fix_F, True, 'Scaling Amp_Phase Residuals', 'Scaled') # not_scaled_dict = multiple_fit_amp_phase(params_guess, params_correct, e, force_all, fix_F, False, 'Scaling Amp_Phase Residuals', 'Not_Scaled') # with pd.ExcelWriter('Scaling_Amp_Phase_Residuals.xlsx', engine='xlsxwriter') as writer: # dfscaled = pd.DataFrame(scaled_dict) # dfnotscaled = pd.DataFrame(not_scaled_dict) # dfscaled.to_excel(writer, sheet_name='Scaled', index=False) # dfnotscaled.to_excel(writer, sheet_name='Not Sclaed', index=False) \ No newline at end of file diff --git a/trimer/kind_of_trimer_resonatorfrequencypicker.py b/trimer/kind_of_trimer_resonatorfrequencypicker.py new file mode 100644 index 0000000..f7584ce --- /dev/null +++ b/trimer/kind_of_trimer_resonatorfrequencypicker.py @@ -0,0 +1,791 @@ +# -*- coding: utf-8 -*- +""" +Created on Tue Aug 9 16:07:55 2022 + +@author: vhorowit +""" + +import numpy as np +import trimer_resonatorphysics +from trimer_resonatorphysics import res_freq_weak_coupling, calcnarrowerW +from trimer_helperfunctions import read_params +import matplotlib.pyplot as plt +from Trimer_simulator import curve1, theta1, curve2, theta2 +from scipy.signal import find_peaks + +# default settings +verbose = False +n=100 +debug = False + +""" Given a limited set of available frequencies called "drive", +find those indices that most closely correspond to the desired frequencies. +This will not throw an err if two are the same; that could be added by checking if nunique is a shorter length. """ +def freqpoints(desiredfreqs, drive): + p = [] # p stands for frequency points; these are the indicies of frequencies that we will be measuring. + for f in desiredfreqs: + absolute_val_array = np.abs(drive - f) + f_index = absolute_val_array.argmin() + p.append(f_index) + return p + + +""" drive and phase are two lists of the same length + This will only return one frequency for each requested angle, even if there are additional solutions. + It's helpful if drive is morefrequencies. 
+""" +def find_freq_from_angle(drive, phase, angleswanted = [-np.pi/4], returnindex = False, verbose = False): + assert len(drive) == len(phase) + + #specialanglefreq = [drive[np.argmin(abs(phase%(2*np.pi) - anglewanted%(2*np.pi)))] \ + # for anglewanted in angleswanted ] + + threshold = np.pi/30 # small angle threshold + specialanglefreq = [] # initialize list + indexlist = [] + for anglewanted in angleswanted: + index = np.argmin(abs(phase%(2*np.pi) - anglewanted%(2*np.pi))) # find where phase is closest + + if index == 0 or index >= len(drive)-1: # edges of dataset require additional scrutiny + ## check to see if it's actually close after all + nearness = abs(phase[index]%(2*np.pi)-anglewanted%(2*np.pi)) + if nearness > threshold: + continue # don't include this index + specialanglefreq.append(drive[index]) + indexlist.append(index) + + if False: + plt.figure() + plt.plot(specialanglefreq,phase[indexlist]/np.pi) + plt.xlabel('Freq') + plt.ylabel('Angle (pi)') + + if returnindex: + return specialanglefreq, indexlist + else: + return specialanglefreq + +""" n is the number of frequencies is the drive; we'll have more for more frequencies. + Can you improve this by calling create_drive_arrays afterward? """ +def makemorefrequencies(vals_set, minfreq, maxfreq, MONOMER, forceall, + res1 = None, res2 = None, + includefreqs = None, n=n, staywithinlims = False): + [k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set] = read_params(vals_set) + + if res1 is None: + res1 = res_freq_weak_coupling(k1_set, m1_set, b1_set) + if not MONOMER and res2 is None: + res2 = res_freq_weak_coupling(k2_set, m2_set, b2_set) + + morefrequencies = np.linspace(minfreq, maxfreq, num = n*60) + if MONOMER: + morefrequencies = np.append(morefrequencies, [res1]) + else: + morefrequencies = np.append(morefrequencies, [res1,res2]) + + if includefreqs is not None: + morefrequencies = np.append(morefrequencies, np.array(includefreqs)) + + try: + W1 = trimer_resonatorphysics.approx_width(k = k1_set, m = m1_set, b=b1_set) + except ZeroDivisionError: + print('k1_set:', k1_set) + print('m1_set:', m1_set) + print('b1_set:', b1_set) + W1 = (maxfreq - minfreq)/5 + morefrequencies = np.append(morefrequencies, np.linspace(res1-W1, res1+W1, num = 7*n)) + morefrequencies = np.append(morefrequencies, np.linspace(res1-2*W1, res1+2*W1, num = 10*n)) + if not MONOMER: + W2 = trimer_resonatorphysics.approx_width(k = k2_set, m = m2_set, b=b2_set) + morefrequencies = np.append(morefrequencies, np.linspace(res2-W2, res2+W2, num = 7*n)) + morefrequencies = np.append(morefrequencies, np.linspace(res2-2*W2, res2+2*W2, num = 10*n)) + morefrequencies = list(np.sort(np.unique(morefrequencies))) + + while morefrequencies[0] < 0: + morefrequencies.pop(0) + + if staywithinlims: + while morefrequencies[0] < minfreq: + morefrequencies.pop(0) + while morefrequencies[-1] > maxfreq: + morefrequencies.pop(-1) + + return np.array(morefrequencies) + + +def create_drive_arrays(vals_set, MONOMER, forceboth, n=n, + morefrequencies = None, + minfreq = None, maxfreq = None, + staywithinlims = False, + includefreqs = [], + callmakemore = False, + verbose = verbose): + + if verbose: + print('Running create_drive_arrays()') + + [k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set] = read_params(vals_set) + + if morefrequencies is None: + if minfreq is None: + minfreq = 0.1 + if maxfreq is None: + maxfreq = 5 + morefrequencies=np.linspace(minfreq,maxfreq,50*n) + if minfreq is None: + minfreq = 
min(morefrequencies) + if maxfreq is None: + maxfreq = max(morefrequencies) + + if minfreq <= 0: + minfreq = 1e-6 + + if callmakemore: + evenmore = makemorefrequencies(vals_set=vals_set, minfreq=minfreq, maxfreq=maxfreq,MONOMER=MONOMER,forceboth=forceboth, + res1 = None, res2 = None, + includefreqs = None, n=n, staywithinlims = staywithinlims) + morefrequencies = np.sort(np.unique(np.append(morefrequencies, evenmore))) + + Q1 = trimer_resonatorphysics.approx_Q(k1_set, m1_set, b1_set) + + # set the fraction of points that are spread evenly in frequency (versus evenly in phase) + if MONOMER: + if Q1 >= 30: + fracevenfreq = .2 + elif Q1 >=10: + fracevenfreq = .4 + else: + fracevenfreq = .5 # such a broad peak that we might as well spread evenly in frequency + else: + Q2 = trimer_resonatorphysics.approx_Q(k2_set, m2_set, b2_set) + if Q1 >=30 and Q2 >= 30: + fracevenfreq = .2 + else: + fracevenfreq = .4 + if MONOMER: + # choose length of anglelist + m = n-3-int(fracevenfreq*n) # 3 are special angles; 20% are evenly spaced freqs + else: + m = int((n-3-(fracevenfreq*n))/2) + + morefrequencies = list(np.sort(morefrequencies)) + while morefrequencies[-1] > maxfreq: + if False: # too verbose! + print('Removing frequency', morefrequencies[-1]) + morefrequencies = morefrequencies[:-1] + while morefrequencies[0]< minfreq: + if False: + print('Removing frequency', morefrequencies[0]) + morefrequencies = morefrequencies[1:] + + phaseR1 = theta1(morefrequencies, k1_set, k2_set, k3_set, k4_set, + b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceboth=forceboth) + + anglelist = np.linspace(min(phaseR1), max(phaseR1), m) ## most of the points are evenly spaced in phase + #anglelist = np.append(anglelist, -np.pi/3) + #anglelist = np.append(anglelist, -2*np.pi/3) + anglelist = np.append(anglelist, -np.pi/4) # special angle 1 + anglelist = np.append(anglelist, -3*np.pi/4) # special angle 2 + anglelist = np.unique(np.sort(np.append(anglelist, -np.pi/2))) # special angle 3 + + freqlist = find_freq_from_angle(morefrequencies, + phase = phaseR1, + angleswanted = anglelist, verbose = verbose) + if False: + print('anglelist/pi:', anglelist/np.pi, 'corresponds to frequency list:', freqlist, '. 
But still adding to chosendrive.') + + if not MONOMER: + phaseR2 = theta2(morefrequencies, k1_set, k2_set, k3_set, k4_set, + b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, 0, forceboth=forceboth) + + del anglelist + anglelist = np.linspace(min(phaseR2), max(phaseR2), m) + #anglelist = np.append(anglelist, -np.pi/3) + #anglelist = np.append(anglelist, -2*np.pi/3) + anglelist = np.append(anglelist, -np.pi/4) + anglelist = np.append(anglelist, -3*np.pi/4) + anglelist = np.unique(np.sort(np.append(anglelist, -np.pi/2))) + + freqlist2 = find_freq_from_angle(morefrequencies, + phase = phaseR2, + angleswanted = anglelist) + if False: + print('anglelist/pi: ', anglelist/np.pi) + print('freqlist2: ', freqlist2) + freqlist.extend(freqlist2) + res2 = res_freq_weak_coupling(k2_set, m2_set, b2_set) + freqlist.append(res2) + morefrequencies = np.append(morefrequencies,res2) + + freqlist.extend(includefreqs) + morefrequencies = np.append(morefrequencies, includefreqs) + res1 = res_freq_weak_coupling(k1_set, m1_set, b1_set) + freqlist.append(res1) + try: + reslist = res_freq_numeric(vals_set=vals_set, MONOMER=MONOMER, mode = 'all', forceboth=forceboth, + minfreq=minfreq, maxfreq=maxfreq, morefrequencies=morefrequencies, + unique = True, veryunique = True, verboseplot = False, verbose=verbose, iterations = 3) + freqlist.extend(reslist) + except NameError: + pass + + freqlist = list(np.sort(np.unique(freqlist))) + + while freqlist[0] < 0: + freqlist.pop(0) # drop negative frequencies + + numwanted = n-len(freqlist) # how many more frequencies are wanted? + evenlyspacedfreqlist = np.linspace(minfreq, maxfreq, + num = max(numwanted + 2,3)) # I added 2 for the endpoints + freqlist.extend(evenlyspacedfreqlist) + #print(freqlist) + chosendrive = list(np.sort(np.unique(np.array(freqlist)))) + + if staywithinlims: + while chosendrive[0] < minfreq or chosendrive[0] < 0: + f = chosendrive.pop(0) + if verbose: + print('Warning: Unexpected frequency', f) + while chosendrive[-1] > maxfreq: + f = chosendrive.pop(-1) + if verbose: + print('Warning: Unexpected frequency', f) + else: + while chosendrive[0] < 0: + f = chosendrive.pop(0) + print('Warning: Unexpected negative frequency', f) + chosendrive = np.array(chosendrive) + + #morefrequencies.extend(chosendrive) + morefrequencies = np.concatenate((morefrequencies, chosendrive)) + morefrequencies = list(np.sort(np.unique(morefrequencies))) + + if staywithinlims: + while morefrequencies[0] < minfreq: + f = morefrequencies.pop(0) + print('Warning: Unexpected frequency', f) + while morefrequencies[-1] > maxfreq: + f = morefrequencies.pop(-1) + print('warning: Unexpected frequency', f) + + return chosendrive, np.array(morefrequencies) + +def find_special_freq(drive, amp, phase, anglewanted = np.radians(225)): + maxampfreq = drive[np.argmax(amp)] + specialanglefreq = drive[np.argmin(abs(phase%(2*np.pi) - anglewanted%(2*np.pi)))] + return maxampfreq, specialanglefreq + + +### res_freq_numeric() +## Uses privilege +## Not guaranteed to find all resonance peaks but should work ok for dimer +## Returns list of peak frequencies. +## If numtoreturn is None, then any number of frequencies could be returned. +## You can also set numtoreturn to 1 or 2 to return that number of frequencies. 
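## Editorial sketch (assumed usage, not in the committed file): after importing this module,
## a call might look like
##     reslist = res_freq_numeric(vals_set, MONOMER=False, forceall=False,
##                                numtoreturn=2, verboseplot=False)
## with vals_set in the k1, k2, k3, k4, b1, b2, b3, F, m1, m2, m3 order used elsewhere in
## this repo; trimer_case_study_frequency_picker.py below calls it with the defaults.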
+def res_freq_numeric(vals_set, MONOMER, forceall, + mode = 'all', + minfreq=.1, maxfreq=5, morefrequencies=None, includefreqs = [], + unique = True, veryunique = True, numtoreturn = None, + verboseplot = True, plottitle = None, verbose=verbose, iterations = 1, + use_R2_only = False, + returnoptions = False): + + if verbose: + print('\nRunning res_freq_numeric() with mode ' + mode) + if plottitle is not None: + print(plottitle) + k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set = read_params(vals_set) + + # Never Monomer in this case + if MONOMER and numtoreturn != 2: # 2 is a tricky case... just use the rest of the algorithm + if numtoreturn is not None and numtoreturn != 1: + print('Cannot return ' + str(numtoreturn) + ' res freqs for Monomer.') + if verbose: + print('option 1') + + freqlist = [res_freq_weak_coupling(k1_set, m1_set, b1_set)] # just compute it directly for Monomer + if returnoptions: + return freqlist, 1 + return freqlist + + approx_res_freqs = [res_freq_weak_coupling(k1_set, m1_set, b1_set)] + if not MONOMER: + approx_res_freqs.append(res_freq_weak_coupling(k2_set, m2_set, b2_set)) + + for f in approx_res_freqs: + if f > maxfreq or f < minfreq: + print('Warning! Check minfreq and maxfreq') + print('minfreq', minfreq) + print('maxfreq', maxfreq) + print('Approx resonant freq', f) + + if morefrequencies is None: + morefrequencies = makemorefrequencies(vals_set=vals_set, minfreq=minfreq, maxfreq=maxfreq, + forceall=forceall, includefreqs = approx_res_freqs, + MONOMER=MONOMER, n=n) + else: + morefrequencies = np.append(morefrequencies, approx_res_freqs) + morefrequencies = np.sort(np.unique(morefrequencies)) + + # init + indexlist = [] + + # Never Monomer in this case + if MONOMER: + freqlist = [res_freq_weak_coupling(k1_set, m1_set, b1_set)] + resfreqs_from_amp = freqlist + else: + first = True + for i in range(iterations): + if not first: # not first. This is a repeated iteration. indexlist has been defined. + if verbose: + print('indexlist:', indexlist) + if max(indexlist) > len(morefrequencies): + print('len(morefrequencies):', len(morefrequencies)) + print('morefrequencies:', morefrequencies) + print('indexlist:', indexlist) + print('Repeating with finer frequency mesh around frequencies:', morefrequencies[np.sort(indexlist)]) + + assert min(morefrequencies) >= minfreq + assert max(morefrequencies) <= maxfreq + if debug: + print('minfreq', minfreq) + print('Actual min freq', min(morefrequencies)) + print('maxfreq', maxfreq) + print('Actual max freq', max(morefrequencies)) + morefrequenciesprev = morefrequencies.copy() + for index in indexlist: + try: + spacing = abs(morefrequenciesprev[index] - morefrequenciesprev[index-1]) + except: + if verbose: + print('morefrequenciesprev:',morefrequenciesprev) + print('index:', index) + spacing = abs(morefrequenciesprev[index+1] - morefrequenciesprev[index]) + finerlist = np.linspace(max(minfreq,morefrequenciesprev[index]-spacing), + min(maxfreq,morefrequenciesprev[index] + spacing), + num = n) + assert min(finerlist) >= minfreq + assert max(finerlist) <= maxfreq + morefrequencies = np.append(morefrequencies,finerlist) + morefrequencies = np.sort(np.unique(morefrequencies)) + + + while morefrequencies[-1] > maxfreq: + if False: # too verbose! 
+ print('Removing frequency', morefrequencies[-1]) + morefrequencies = morefrequencies[:-1] + while morefrequencies[0]< minfreq: + if False: + print('Removing frequency', morefrequencies[0]) + morefrequencies = morefrequencies[1:] + R1_amp_noiseless = curve1(morefrequencies, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall) + R1_phase_noiseless = theta1(morefrequencies, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall) + R1_phase_noiseless = np.unwrap(R1_phase_noiseless) + if debug: + plt.figure() + plt.plot(morefrequencies, R1_amp_noiseless, label = 'R1_amp') + plt.plot(morefrequencies, R1_phase_noiseless, label = 'R1_phase') + if not MONOMER: + R2_amp_noiseless = curve2(morefrequencies, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall) + R2_phase_noiseless = theta2(morefrequencies, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall) + R2_phase_noiseless = np.unwrap(R2_phase_noiseless) + if debug: + plt.plot(morefrequencies, R2_amp_noiseless, label = 'R2_amp') + plt.plot(morefrequencies, R2_phase_noiseless, label = 'R2_phase') + + ## find maxima + index1 = np.argmax(R1_amp_noiseless) + if not MONOMER and not use_R2_only: + indexlist1, heights = find_peaks(R1_amp_noiseless, height=.015, distance = 5) + if debug: + print('index1:', index1) + print('indexlist1:',indexlist1) + print('heights', heights) + plt.axvline(morefrequencies[index1]) + for i in indexlist1: + plt.axvline(morefrequencies[i]) + assert index1 <= len(morefrequencies) + if len(indexlist1)>0: + assert max(indexlist1) <= len(morefrequencies) + else: + print('Warning: find_peaks on R1_amp returned indexlist:', indexlist1) + plt.figure() + plt.plot(R1_amp_noiseless) + plt.xlabel(R1_amp_noiseless) + plt.figure() + else: + indexlist1 = [] + if MONOMER: + indexlist2 = [] + else: + index2 = np.argmax(R2_amp_noiseless) + indexlist2, heights2 = find_peaks(R2_amp_noiseless, height=.015, distance = 5) + assert index2 <= len(morefrequencies) + if len(indexlist2) >0: + assert max(indexlist2) <= len(morefrequencies) + + if verbose: + print('Maximum amplitude for R1 is ', R1_amp_noiseless[index1], 'at', morefrequencies[index1]) + if not MONOMER: + print('Maximum amplitude for R2 is ', R2_amp_noiseless[index2], 'at', morefrequencies[index2]) + + indexlistampR1 = np.append(indexlist1,index1) + assert max(indexlistampR1) <= len(morefrequencies) + if False: # too verbose! 
+ print('indexlistampR1:', indexlistampR1) + if MONOMER: + indexlist = indexlistampR1 + assert max(indexlist) <= len(morefrequencies) + indexlistampR2 = [] + else: + indexlistampR2 = np.append(indexlist2, index2) + if False: + print('indexlistampR2:',indexlistampR2) + assert max(indexlistampR2) <= len(morefrequencies) + indexlist = np.append(indexlistampR1, indexlistampR2) + if False: + print('indexlist:', indexlist) + + assert max(indexlist) <= len(morefrequencies) + indexlist = list(np.unique(indexlist)) + indexlist = [int(index) for index in indexlist] + first = False + + ## Check to see if findpeaks just worked + if (numtoreturn == 2) and (mode != 'phase'): + thresh = .006 + if len(indexlist2) == 2: + if verbose: + print("Used findpeaks on R2 amplitude (option 2)") + opt2freqlist = list(np.sort(morefrequencies[indexlist2])) + if abs(opt2freqlist[1]-opt2freqlist[0]) > thresh: + if returnoptions: + return opt2freqlist, 2 + return opt2freqlist + if len(indexlist1) == 2 and not use_R2_only: + opt3freqlist = list(np.sort(morefrequencies[indexlist1])) + if abs(opt3freqlist[1]-opt3freqlist[0]) > thresh: + if verbose: + print("Used findpeaks on R1 amplitude (option 3)") + if returnoptions: + return opt3freqlist, 3 + return opt3freqlist + if verbose: + print('indexlist1 from R1 amp find_peaks is', indexlist1) + print('indexlist2 from R2 amp find_peaks is', indexlist2) + + if verbose: + print('indexlist:',indexlist) + resfreqs_from_amp = morefrequencies[indexlist] + + if not MONOMER or mode == 'phase': + ## find where angles are resonant angles + angleswanted = [np.pi/2, -np.pi/2] # the function will wrap angles so don't worry about mod 2 pi. + R1_flist,indexlistphaseR1 = find_freq_from_angle(morefrequencies, R1_phase_noiseless, angleswanted=angleswanted, returnindex=True) + if MONOMER: + assert mode == 'phase' + resfreqs_from_phase = R1_flist + else: + R2_flist,indexlistphaseR2 = find_freq_from_angle(morefrequencies, R2_phase_noiseless, angleswanted=angleswanted, + returnindex=True) + resfreqs_from_phase = np.append(R1_flist, R2_flist) + else: + assert MONOMER + resfreqs_from_phase = [] # don't bother with this for the MONOMER + indexlistphaseR1 = [] + indexlistphaseR2 = [] + R1_flist = [] + + if verboseplot: + #Never Monomer in this case + if MONOMER: # still need to calculate the curves + R1_amp_noiseless = curve1(morefrequencies, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall) + R1_phase_noiseless = theta1(morefrequencies, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall) + R1_phase_noiseless = np.unwrap(R1_phase_noiseless) + indexlistampR1 = [np.argmin(abs(w - morefrequencies )) for w in resfreqs_from_amp] + print('Plotting!') + fig, (ampax, phaseax) = plt.subplots(2,1,gridspec_kw={'hspace': 0}, sharex = 'all') + plt.sca(ampax) + plt.title(plottitle) + plt.plot(morefrequencies, R1_amp_noiseless, color='gray') + if not MONOMER: + plt.plot(morefrequencies, R2_amp_noiseless, color='lightblue') + + plt.plot(morefrequencies[indexlistampR1],R1_amp_noiseless[indexlistampR1], '.') + if not MONOMER: + plt.plot(morefrequencies[indexlistampR2],R2_amp_noiseless[indexlistampR2], '.') + + plt.sca(phaseax) + plt.plot(morefrequencies,R1_phase_noiseless, color='gray' ) + if not MONOMER: + plt.plot(morefrequencies,R2_phase_noiseless, color = 'lightblue') + plt.plot(R1_flist, theta1(np.array(R1_flist), k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall), 
'.') + if not MONOMER: + plt.plot(R2_flist, theta2(np.array(R2_flist), k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall), '.') + + if mode == 'maxamp' or mode == 'amp' or mode == 'amplitude': + freqlist = resfreqs_from_amp + elif mode == 'phase': + freqlist = resfreqs_from_phase + else: + if mode != 'all': + print("Set mode to any of 'all', 'maxamp', or 'phase'. Recovering to 'all'.") + # mode is 'all' + freqlist = np.sort(np.append(resfreqs_from_amp, resfreqs_from_phase)) + + + if veryunique: # Don't return both close frequencies; just pick the higher amplitude frequency of the two. + ## I obtained indexlists four ways: indexlistampR1, indexlistampR2, indexlistphaseR1, indexlistphaseR2 + indexlist = indexlist + indexlistphaseR1 + if not MONOMER: + indexlist = indexlist + indexlistphaseR2 + indexlist = list(np.sort(np.unique(indexlist))) + if verbose: + print('indexlist:', indexlist) + + narrowerW = calcnarrowerW(vals_set, MONOMER) + + """ a and b are indices of morefrequencies """ + def veryclose(a,b): + ## option 1: veryclose if indices are within 2. + #return abs(b-a) <= 2 + + ## option 2: very close if frequencies are closer than .01 rad/s + #veryclose = abs(morefrequencies[a]-morefrequencies[b]) <= .1 + + ## option 3: very close if freqeuencies are closer than W/20 + veryclose = abs(morefrequencies[a]-morefrequencies[b]) <= narrowerW/20 + + return veryclose + + if len(freqlist) > 1: + ## if two elements of indexlist are veryclose to each other, want to remove the smaller amplitude. + removeindex = [] # create a list of indices to remove + try: + tempfreqlist = morefrequencies[indexlist] # indexlist is indicies of morefrequencies. + # if the 10th element of indexlist is indexlist[10]=200, then tempfreqlist[10] = morefrequencies[200] + except: + print('indexlist:', indexlist) + A2 = curve2(tempfreqlist, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, 0, forceall) + # and then A2[10] is the amplitude of R2 at the frequency morefrequencies[200] + # and then the number 10 is the sort of number we will add to a removeindex list + for i in range(len(indexlist)-1): + if veryclose(indexlist[i], indexlist[i-1]): + if A2[i] < A2[i-1]: # remove the smaller amplitude + removeindex.append(i) + else: + removeindex.append(i-1) + numtoremove = len(removeindex) + if verbose and numtoremove > 0: + print('Removing', numtoremove, 'frequencies') + + removeindex = list(np.unique(removeindex)) + indexlist = list(indexlist) + ## Need to work on removal from the end of the list + ## in order to avoid changing index numbers while working with the list + while removeindex != []: + i = removeindex.pop(-1) # work backwards through indexes to remove + el = indexlist.pop(i) # remove it from indexlist + if numtoremove < 5 and verbose: + print('Removed frequency', morefrequencies[el]) + + freqlist = morefrequencies[indexlist] + + freqlist = np.sort(freqlist) + + if unique or veryunique or (numtoreturn is not None): ## Don't return multiple copies of the same number. 
+ freqlist = np.unique(np.array(freqlist)) + + if verbose: + print('Possible frequencies are:', freqlist) + + if numtoreturn is not None: + if len(freqlist) == numtoreturn: + if verbose: + print ('option 4') + if returnoptions: + return list(freqlist), 4 + return list(freqlist) + if len(freqlist) < numtoreturn: + if verbose: + print('Warning: I do not have as many resonant frequencies as was requested.') + freqlist = list(freqlist) + # instead I should add another frequency corresponding to some desireable phase. + if verbose: + print('Returning instead a freq2 at phase -3pi/4.') + goodphase = -3*np.pi/4 + for i in range(iterations): + f2, ind2 = find_freq_from_angle(drive = morefrequencies, + phase = theta1(morefrequencies, + k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall), + angleswanted = [goodphase], returnindex = True) + ind2 = ind2[0] + try: + spacing = abs(morefrequencies[ind2] - morefrequencies[ind2-1]) + except IndexError: + spacing = abs(morefrequencies[ind2+1] - morefrequencies[ind2]) + finermesh = np.linspace(morefrequencies[ind2] - spacing,morefrequencies[ind2] + spacing, num=n) + morefrequencies = np.append(morefrequencies, finermesh) + f2 = f2[0] + freqlist.append(f2) + if verboseplot: + plt.sca(phaseax) + plt.plot(f2, theta1(f2, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall), '.') + print('Appending: ', f2) + for i in range(numtoreturn - len(freqlist)): + # This is currently unlikely to be true, but I'm future-proofing + # for a future when I want to set the number to an integer greater than 2. + freqlist.append(np.nan) # increase list to requested length with nan + if verboseplot: + plt.sca(ampax) + plt.plot(freqlist, curve1(freqlist, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall), 'x') + if verbose: + print ('option 5') + if returnoptions: + return freqlist, 5 + return freqlist + + R1_amp_noiseless = curve1(freqlist, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall) + R2_amp_noiseless = curve2(freqlist, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall) + + topR1index = np.argmax(R1_amp_noiseless) + + if numtoreturn == 1: + # just return the one max amp frequency. + if verbose: + print('option 6') + if returnoptions: + return [freqlist[topR1index]],6 + return [freqlist[topR1index]] + + if numtoreturn != 2: + print('Warning: returning ' + str(numtoreturn) + ' frequencies is not implemented. Returning 2 frequencies.') + + # Choose a second frequency to return. 
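        # (Editorial note: the block below keeps the R2 amplitude maximum when it lies more
        #  than 0.2 rad/s away from the R1 maximum; otherwise it tries the next-highest R1
        #  peak, and finally falls back to whichever end of freqlist is furthest from f1.)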
+ topR2index = np.argmax(R2_amp_noiseless) + threshold = .2 # rad/s + if abs(freqlist[topR1index] - freqlist[topR2index]) > threshold: + freqlist = list([freqlist[topR1index], freqlist[topR2index]]) + if verboseplot: + plt.sca(ampax) + plt.plot(freqlist, curve1(freqlist, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall), 'x') + if verbose: + print('option 7') + if returnoptions: + return freqlist, 7 + return freqlist + else: + R1_amp_noiseless = list(R1_amp_noiseless) + freqlist = list(freqlist) + f1 = freqlist.pop(topR1index) + R1_amp_noiseless.pop(topR1index) + secondR1index = np.argmax(R1_amp_noiseless) + f2 = freqlist.pop(secondR1index) + if abs(f2-f1) > threshold: + freqlist = list([f1, f2]) # overwrite freqlist + if verboseplot: + plt.sca(ampax) + plt.plot(freqlist, curve1(freqlist, k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set, + 0, forceall), 'x') + if verbose: + print('option 8') + if returnoptions: + return freqlist, 8 + return freqlist + else: # return whatever element of the freqlist is furthest + freqlist.append(f2) + # is f1 closer to top or bottom of freqlist? + if abs(f1 - min(freqlist)) > abs(f1 - max(freqlist)): + if verbose: + print('option 9') + if returnoptions: + return [f1, min(freqlist)], 9 + return [f1, min(freqlist)] + else: + if verbose: + print('option 10') + if returnoptions: + return [f1, max(freqlist)], 10 + return [f1, max(freqlist)] + + + else: + if verbose: + print('option 11') + if returnoptions: + return list(freqlist),11 + return list(freqlist) + + +# create list of all measured frequencies, centered around res (the resonance frequency), and spaced out by freqdiff +def allmeasfreq_one_res(res, max_num_p, freqdiff): + newfreqplus = res + newfreqminus = res + freqlist = [res] + while len(freqlist) < max_num_p: + newfreqplus = newfreqplus + freqdiff + newfreqminus = newfreqminus - freqdiff + freqlist.append(newfreqplus) + freqlist.append(newfreqminus) + if min(freqlist) < 0: + print('Value less than zero!') + print('min(freqlist):', min(freqlist)) + return freqlist + +# create list of all measured frequencies, centered around res1 and res2, respectively, and spaced out by freqdiff +def allmeasfreq_two_res(res1, res2, max_num_p, freqdiff): + newfreq1plus = res1 + newfreq1minus = res1 + newfreq2plus = res2 + newfreq2minus = res2 + freqlist = [res1, res2] + while len(freqlist) < max_num_p: + newfreq1plus = newfreq1plus + freqdiff + newfreq1minus = newfreq1minus - freqdiff + newfreq2plus = newfreq2plus + freqdiff + newfreq2minus = newfreq2minus - freqdiff + freqlist.append(newfreq1plus) ## this order might matter + freqlist.append(newfreq2plus) + freqlist.append(newfreq1minus) + freqlist.append(newfreq2minus) + if min(freqlist) < 0: + print('Value less than zero!') + print('min(freqlist):', min(freqlist)) + return freqlist + + + +def best_choice_freq_set(vals_set, MONOMER, forceboth, reslist, num_p = 10): + [k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set] = read_params(vals_set) + narrowerW = calcnarrowerW(vals_set, MONOMER) + freqdiff = round(narrowerW/6,4) + if MONOMER: + measurementfreqs = allmeasfreq_one_res(reslist[0], num_p, freqdiff) + else: + measurementfreqs = allmeasfreq_two_res(reslist[0], reslist[1], num_p, freqdiff) + + return measurementfreqs[:num_p] + + + + + + + + diff --git a/trimer/trimer_case_study_frequency_picker.py b/trimer/trimer_case_study_frequency_picker.py new file mode 100644 index 0000000..2336ae6 --- 
/dev/null +++ b/trimer/trimer_case_study_frequency_picker.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Mon Jul 29 10:55:10 2024 + +@author: lydiabullock +""" +''' Case Study for System 0/2 from '15 Systems - 10 Freqs NetMAP' + Using "ideal" frequencies to test NetMAP. + The frequencies picked are only based off of the first two resonators but use the trimer information.''' + +from comparing_curvefit_types import run_trials +from kind_of_trimer_resonatorfrequencypicker import res_freq_numeric +import math +import matplotlib.pyplot as plt + +''' Begin Work Here. ''' + +MONOMER = False +forceall = False + +## System 0 from '15 Systems - 10 Freqs NetMAP' +# true_parameters = [1.045, 0.179, 3.852, 1.877, 5.542, 1.956, 3.71, 1, 3.976, 0.656, 3.198] +# guessed_parameters = [1.2379, 0.1764, 3.7327, 1.8628, 5.93, 2.1793, 4.2198, 1, 4.3335, 0.7016, 3.0719] + +## System 2 from '15 Systems - 10 Freqs NetMAP' +# true_parameters = [3.264, 7.71, 6.281, 3.564, 5.859, 0.723, 3.087, 1, 3.391, 3.059, 7.796] +# guessed_parameters = [3.1169, 7.0514, 6.9721, 3.6863, 4.9006, 0.707, 3.2658, 1, 2.9289, 2.7856, 6.8323] + +## System 8 from '15 systems - 10 Freqs NetMAP & Better Parameters' +true_parameters = [7.731, 1.693, 2.051, 8.091, 0.427, 0.363, 0.349, 1, 7.07, 7.195, 4.814] +guessed_parameters = [7.2806, 1.8748, 1.8077, 8.7478, 0.3767, 0.2974, 0.3744, 1, 7.4933, 6.7781, 4.2136] + +best_frequencies_list = res_freq_numeric(true_parameters, MONOMER, forceall) +best_frequencies_list = [x for x in best_frequencies_list if not math.isnan(x)] +length_noise_NetMAP = len(best_frequencies_list) + +#Run Trials +if length_noise_NetMAP == 0: + print('No Possible Frequencies.') +else: + print(f'Best frequencies to use are: {best_frequencies_list}') + avg_e1_list, avg_e2_list, avg_e3_list, avg_e1_bar, avg_e2_bar, avg_e3_bar = run_trials(true_parameters, guessed_parameters, best_frequencies_list, length_noise_NetMAP, 50, 'Sys8_Better_Params_Freq_Pick.xlsx', 'Sys8_Better_Params_Freq_Pick - Plots') + + #Create histogram + plt.title('Average Systematic Error Across Parameters') + plt.xlabel('') + plt.ylabel('Counts') + plt.hist(avg_e2_list, alpha=0.5, color='green', label='Cartesian (X & Y)', edgecolor='black') + plt.hist(avg_e1_list, alpha=0.5, color='blue', label='Polar (Amp & Phase)', edgecolor='black') + plt.hist(avg_e3_list, bins=50, alpha=0.5, color='red', label='NetMAP', edgecolor='black') + plt.legend(loc='upper center') + + plt.savefig('_Histogram_Sys8_Better_Params_Freq_Pick.png') + diff --git a/trimer/trimer_frequency_study.py b/trimer/trimer_frequency_study.py new file mode 100644 index 0000000..b9c5ee6 --- /dev/null +++ b/trimer/trimer_frequency_study.py @@ -0,0 +1,179 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Tue Jul 30 12:35:48 2024 + +@author: lydiabullock +""" +from comparing_curvefit_types import complex_noise, get_parameters_NetMAP, find_avg_e, automate_guess +import numpy as np +import pandas as pd +import matplotlib.pyplot as plt +from Trimer_simulator import realamp1, realamp2, realamp3, imamp1, imamp2, imamp3, curve1, theta1, curve2, theta2, curve3, theta3 +import sys +import os +myheatmap = os.path.abspath('..') +sys.path.append(myheatmap) +from myheatmap import myheatmap +import matplotlib.colors as mcolors + + +def plot_data(frequencies, params_guess, params_correct, e, force_all): + #Original Data + X1 = realamp1(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], 
params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + Y1 = imamp1(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + + X2 = realamp2(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + Y2 = imamp2(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + + X3 = realamp3(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + Y3 = imamp3(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + + Amp1 = curve1(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + Phase1 = theta1(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) \ + + 2 * np.pi + Amp2 = curve2(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + Phase2 = theta2(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) \ + + 2 * np.pi + Amp3 = curve3(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) + Phase3 = theta3(frequencies, params_correct[0], params_correct[1], params_correct[2], params_correct[3], params_correct[4], params_correct[5], params_correct[6], params_correct[7], params_correct[8], params_correct[9], params_correct[10], e, force_all) \ + + 2 * np.pi + + ## Begin graphing - Re vs Im + fig = plt.figure(figsize=(10,6)) + gs = fig.add_gridspec(1, 3, width_ratios=[1,1,1], hspace=0.25, wspace=0.05) + + ax1 = fig.add_subplot(gs[0, 0], aspect='equal') + ax2 = fig.add_subplot(gs[0, 1], sharex=ax1, sharey=ax1, aspect='equal') + ax3 = fig.add_subplot(gs[0, 2], sharex=ax1, sharey=ax1, aspect='equal') + + #Original Data + ax1.plot(X1,Y1,'ro', alpha=0.5, markersize=5.5, label = 'Data') + ax2.plot(X2,Y2,'bo', alpha=0.5, markersize=5.5, label = 'Data') + ax3.plot(X3,Y3,'go', alpha=0.5, markersize=5.5, label = 'Data') + + fig.suptitle('Trimer 
+    ax1.set_title('Resonator 1', fontsize=14)
+    ax2.set_title('Resonator 2', fontsize=14)
+    ax3.set_title('Resonator 3', fontsize=14)
+    ax1.set_ylabel('Im(Z) (m)')
+    ax1.set_xlabel('Re(Z) (m)')
+    ax2.set_xlabel('Re(Z) (m)')
+    ax3.set_xlabel('Re(Z) (m)')
+    ax1.label_outer()
+    ax2.label_outer()
+    ax3.label_outer()
+    plt.show()
+
+    ## Begin graphing - Amp and Phase
+    fig = plt.figure(figsize=(16,8))
+    gs = fig.add_gridspec(2, 3, hspace=0.1, wspace=0.1)
+    ((ax1, ax2, ax3), (ax4, ax5, ax6)) = gs.subplots(sharex=True, sharey='row')
+
+    #original data
+    ax1.plot(frequencies, Amp1,'ro', alpha=0.5, markersize=5.5, label = 'Data')
+    ax2.plot(frequencies, Amp2,'bo', alpha=0.5, markersize=5.5, label = 'Data')
+    ax3.plot(frequencies, Amp3,'go', alpha=0.5, markersize=5.5, label = 'Data')
+    ax4.plot(frequencies, Phase1,'ro', alpha=0.5, markersize=5.5, label = 'Data')
+    ax5.plot(frequencies, Phase2,'bo', alpha=0.5, markersize=5.5, label = 'Data')
+    ax6.plot(frequencies, Phase3,'go', alpha=0.5, markersize=5.5, label = 'Data')
+
+    #Graph parts
+    fig.suptitle('Trimer Resonator: Amplitude and Phase', fontsize=16)
+    ax1.set_title('Resonator 1', fontsize=14)
+    ax2.set_title('Resonator 2', fontsize=14)
+    ax3.set_title('Resonator 3', fontsize=14)
+    ax1.set_ylabel('Amplitude')
+    ax4.set_ylabel('Phase')
+
+    for ax in fig.get_axes():
+        ax.set(xlabel='Frequency')
+        ax.label_outer()
+
+    plt.show()
+
+#Code that loops through pairs of frequency points with different spacings
+
+def sweep_freq_pair(frequencies, params_guess, params_correct, e, force_all):
+
+    #Graph the trimer spectra (Re vs Im, amplitude, and phase)
+    plot_data(frequencies, params_guess, params_correct, e, force_all)
+
+    # Loop over possible combinations of frequency indices, i1 and i2
+    for i1 in range(len(frequencies)):
+        freq1 = frequencies[i1]
+
+
+        for i2 in range(len(frequencies)):
+            freq2 = frequencies[i2]
+            freqs = [freq1, freq2]
+            e_2freqs = complex_noise(2,2)
+
+            NetMAP_info = get_parameters_NetMAP(freqs, params_guess, params_correct, e_2freqs, force_all)
+
+            #Find average error (across parameters) for the trial and add to dictionary
+            avg_e1 = find_avg_e(NetMAP_info)
+            NetMAP_info['avg_error'] = avg_e1
+            NetMAP_info['freq1'] = freq1
+            NetMAP_info['freq2'] = freq2
+
+            # Convert lists to scalars when they contain only one item
+            for key in NetMAP_info:
+                if isinstance(NetMAP_info[key], list) and len(NetMAP_info[key]) == 1:
+                    NetMAP_info[key] = NetMAP_info[key][0]
+
+            NetMAP_df = pd.DataFrame([NetMAP_info])
+
+            try: # repeated experiments results
+                resultsdf = pd.concat([resultsdf, NetMAP_df], ignore_index=True)
+            except NameError: # first pair: resultsdf does not exist yet
+                resultsdf = NetMAP_df
+
+    return resultsdf
+
+
+''' Begin work here. '''
+
+##Create the System
+#Randomly chosen one that "looks easy"
+# params_correct = [3, 3, 3, 3, 0.5, 0.5, 0.1, 1, 2, 5, 5]
+# params_guess = automate_guess(params_correct, 20)
+
+#Worst system - System 8 from ‘15 systems - 10 Freqs NetMAP & Better Parameters’
+params_correct = [7.731, 1.693, 2.051, 8.091, 0.427, 0.363, 0.349, 1, 7.07, 7.195, 4.814]
+params_guess = [7.2806, 1.8748, 1.8077, 8.7478, 0.3767, 0.2974, 0.3744, 1, 7.4933, 6.7781, 4.2136]
+
+force_all = False
+e = complex_noise(200,2)
+frequencies = np.linspace(0.001, 4, 200)
+
+#Test each pair of frequencies
+result = sweep_freq_pair(frequencies, params_guess, params_correct, e, force_all)
+result.to_excel('Frequency_Study.xlsx', index=False)
+
+#Recall the data if I need to
+# result = pd.read_excel('/Users/Student/Desktop/Summer Research 2024/Multiple Curve Fit - Which Type/Frequency Study/Frequency_Study_200.xlsx')
+
+#Pivot the DataFrame for the heatmap
+heatmap_data = result.pivot_table(index='freq2', columns='freq1', values='avg_error')
+
+#Create heatmap
+#For log scale!
+colors = [(1, 0.439, 0), 'yellow','green', 'blue', (0.533, 0.353, 0.537)]
+n_bins = 100 # Number of bins for interpolation
+
+cmap_name = 'custom_cmap'
+custom_cmap = mcolors.LinearSegmentedColormap.from_list(cmap_name, colors, N=n_bins)
+
+norm = mcolors.LogNorm(vmin=heatmap_data.min().min(), vmax=heatmap_data.max().max())
+ax = myheatmap(heatmap_data, cmap=custom_cmap, norm=norm, colorbarlabel='Average Error (%)')
+
+#For regular (linear) scale
+# ax = myheatmap(heatmap_data, cmap=custom_cmap, vmax=10, colorbarlabel='Average Error (%)')
+
+ax.set_title('NetMAP Recovery of Trimer Parameters')
+ax.set_xlabel('Frequency 1 (rad/s)')
+ax.set_ylabel('Frequency 2 (rad/s)')
+
+
+
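The script above records one average parameter error per (freq1, freq2) pair and then pivots those rows into a grid for a log-scaled heatmap. The snippet below is a minimal, self-contained sketch of just that pivot-and-plot step, using synthetic error values and plain matplotlib pcolormesh in place of the repo's myheatmap wrapper; the column names freq1, freq2, and avg_error mirror the sweep above, and every number here is invented purely for illustration.

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors

# Synthetic stand-in for the sweep results: one row per (freq1, freq2) pair.
rng = np.random.default_rng(0)
freqs = np.linspace(0.5, 3.5, 7)
rows = [{'freq1': f1, 'freq2': f2, 'avg_error': rng.uniform(0.1, 50.0)}
        for f1 in freqs for f2 in freqs]
toy_results = pd.DataFrame(rows)

# Pivot into a freq2-by-freq1 grid, as done with result.pivot_table above.
grid = toy_results.pivot_table(index='freq2', columns='freq1', values='avg_error')

# Log-scaled color map, analogous to the LogNorm call above.
fig, ax = plt.subplots()
norm = mcolors.LogNorm(vmin=grid.values.min(), vmax=grid.values.max())
mesh = ax.pcolormesh(grid.columns, grid.index, grid.values, norm=norm, shading='nearest')
fig.colorbar(mesh, ax=ax, label='Average Error (%)')
ax.set_xlabel('Frequency 1 (rad/s)')
ax.set_ylabel('Frequency 2 (rad/s)')
plt.show()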
diff --git a/trimer/trimer_helperfunctions.py b/trimer/trimer_helperfunctions.py
new file mode 100644
index 0000000..8d80070
--- /dev/null
+++ b/trimer/trimer_helperfunctions.py
@@ -0,0 +1,105 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Aug 9 16:08:31 2022
+
+@author: vhorowit
+"""
+
+import os
+import datetime
+import matplotlib.pyplot as plt
+import numpy as np
+try:
+    import winsound
+except:
+    pass
+
+def datestring():
+    return datetime.datetime.today().strftime('%Y-%m-%d %H;%M;%S')
+
+## source: https://stackabuse.com/python-how-to-flatten-list-of-lists/
+def flatten(list_of_lists):
+    if len(list_of_lists) == 0:
+        return list_of_lists
+    if isinstance(list_of_lists[0], list):
+        return flatten(list_of_lists[0]) + flatten(list_of_lists[1:])
+    return list_of_lists[:1] + flatten(list_of_lists[1:])
+
+
+def listlength(list1):
+    try:
+        length = len(list1)
+    except TypeError:
+        length = 1
+    return length
+
+def printtime(repeats, before, after, dobeep = True):
+    print('Ran ' + str(repeats) + ' times in ' + str(round(after-before,3)) + ' sec')
+    if dobeep:
+        beep()
+
+""" vh is often complex but its imaginary part is actually zero, so let's store it as a real list of vectors instead """
+def make_real_iff_real(vh):
+    vhr = [] # real list of vectors
+    for vect in vh:
+        vhr.append([v.real for v in vect if v.imag == 0]) # make real if and only if real
+    return (np.array(vhr))
+
+""" Store parameters extracted from SVD """
+def store_params(M1, M2, M3, B1, B2, B3, K1, K2, K3, K4, FD):
+    params = [M1, M2, M3, B1, B2, B3, K1, K2, K3, K4, FD]
+    return params
+
+def read_params(vect):
+    [M1, M2, M3, B1, B2, B3, K1, K2, K3, K4, FD] = vect
+    return [M1, M2, M3, B1, B2, B3, K1, K2, K3, K4, FD]
+
+def savefigure(savename):
+    try:
+        plt.savefig(savename + '.svg', dpi = 600, bbox_inches='tight', transparent=True)
+    except:
+        print('Could not save svg')
+    try:
+        plt.savefig(savename + '.pdf', dpi = 600, bbox_inches='tight', transparent=True)
+        # transparent true source: https://jonathansoma.com/lede/data-studio/matplotlib/exporting-from-matplotlib-to-open-in-adobe-illustrator/
+    except:
+        print('Could not save pdf')
+    plt.savefig(savename + '.png', dpi = 600, bbox_inches='tight', transparent=True)
+    print("Saved:\n", savename + '.png')
+
+
+def calc_error_interval(resultsdf, resultsdfmean, groupby, fractionofdata = .95):
+    for column in ['E_lower_1D', 'E_upper_1D','E_lower_2D', 'E_upper_2D','E_lower_3D', 'E_upper_3D']:
+        resultsdfmean[column] = np.nan
+    dimensions = ['1D', '2D', '3D']
+    items = resultsdfmean[groupby].unique()
+
+    for item in items:
+        for D in dimensions:
+            avgerr = resultsdf[resultsdf[groupby]== item]['avgsyserr%_' + D]
+            avgerr = np.sort(avgerr)
+            halfalpha = (1 - fractionofdata)/2
+            ## literally select the 95% fraction by tossing out the top 2.5% and the bottom 2.5%
+            ## For 95%, It's ideal if I do 40*N measurements for some integer N.
+            lowerbound = np.mean([avgerr[int(np.floor(halfalpha*len(avgerr)))], avgerr[int(np.ceil(halfalpha*len(avgerr)))]])
+            upperbound = np.mean([avgerr[-int(np.floor(halfalpha*len(avgerr))+1)],avgerr[-int(np.ceil(halfalpha*len(avgerr))+1)]])
+            resultsdfmean.loc[resultsdfmean[groupby]== item,'E_lower_'+ D] = lowerbound
+            resultsdfmean.loc[resultsdfmean[groupby]== item,'E_upper_' + D] = upperbound
+    return resultsdf, resultsdfmean
+
+def beep():
+    try:
+        winsound.PlaySound(r'C:\Windows\Media\Speech Disambiguation.wav', flags = winsound.SND_ASYNC)
+        return
+    except:
+        pass
+    try:
+        winsound.PlaySound("SystemHand", winsound.SND_ALIAS)
+        return
+    except:
+        pass
+    try:
+        winsound.Beep(450,150)
+        return
+    except:
+        pass
\ No newline at end of file
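calc_error_interval above estimates a central 95% interval by sorting the per-trial average errors and averaging the floor/ceil entries 2.5% in from each end. As a rough cross-check on invented numbers (not project data), the same indexing should land close to np.quantile at 0.025 and 0.975; exact agreement is not expected because the interpolation rules differ slightly. A minimal sketch, with variable names borrowed from the function above:

import numpy as np

rng = np.random.default_rng(1)
avgerr = np.sort(rng.lognormal(mean=0.0, sigma=1.0, size=200))  # synthetic per-trial average errors

fractionofdata = 0.95
halfalpha = (1 - fractionofdata) / 2  # 0.025

# Same element selection as calc_error_interval: average the floor/ceil neighbors at each end.
lower = np.mean([avgerr[int(np.floor(halfalpha * len(avgerr)))],
                 avgerr[int(np.ceil(halfalpha * len(avgerr)))]])
upper = np.mean([avgerr[-int(np.floor(halfalpha * len(avgerr)) + 1)],
                 avgerr[-int(np.ceil(halfalpha * len(avgerr)) + 1)]])

# Reference bounds from numpy's quantile function.
q_lower, q_upper = np.quantile(avgerr, [halfalpha, 1 - halfalpha])

print('interval from sort/index :', lower, upper)
print('interval from np.quantile:', q_lower, q_upper)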
diff --git a/trimer/trimer_resonatorphysics.py b/trimer/trimer_resonatorphysics.py
new file mode 100644
index 0000000..ccff283
--- /dev/null
+++ b/trimer/trimer_resonatorphysics.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Aug 9 16:07:09 2022
+
+@author: vhorowit
+"""
+
+''' Changes by lydiabullock. Adapting resonatorphysics to work for the Trimer functions and parameters.
+    I believe I only changed line 49 for now. '''
+
+from trimer_helperfunctions import read_params
+import numpy as np
+import math
+
+
+def complexamp(A,phi):
+    return A * np.exp(1j*phi)
+
+def amp(a,b):
+    return np.sqrt(a**2 + b**2)
+
+def A_from_Z(Z): # calculate amplitude of complex number
+    return amp(Z.real, Z.imag)
+
+# For driven, damped oscillator: res_freq = sqrt(k/m - b^2/(2m^2))
+# Note: Requires b < sqrt(2mk) to be significantly underdamped
+# Otherwise there is no resonant frequency and we get an err from the negative number under the square root
+# This works for monomer and for weak coupling. It does not work for strong coupling.
+# Uses privilege. See also res_freq_numeric()
+def res_freq_weak_coupling(k, m, b):
+    try:
+        w = math.sqrt(k/m - (b*b)/(2*m*m))
+    except:
+        w = np.nan
+        print('no resonance frequency for k=', k, ', m=', m, ' b=', b)
+    return w
+
+## source: https://en.wikipedia.org/wiki/Q_factor#Mechanical_systems
+# Does not work for strong coupling.
+def approx_Q(k, m, b):
+    return math.sqrt(m*k)/b
+
+# Approximate width of Lorentzian peak.
+# Does not work for strong coupling.
+def approx_width(k, m, b):
+    return res_freq_weak_coupling(k, m, b) / approx_Q(k, m, b)
+
+def calcnarrowerW(vals_set, MONOMER):
+    [k1_set, k2_set, k3_set, k4_set, b1_set, b2_set, b3_set, F_set, m1_set, m2_set, m3_set] = read_params(vals_set)
+    W1=approx_width(k1_set, m1_set, b1_set)
+    if MONOMER:
+        narrowerW = W1
+    else:
+        W2=approx_width(k2_set, m2_set, b2_set)
+        narrowerW = min(W1,W2)
+    return narrowerW
+
+
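The weak-coupling formulas in trimer_resonatorphysics.py follow the standard driven, damped oscillator results: res_freq = sqrt(k/m - b^2/(2m^2)), Q ~ sqrt(mk)/b, and width ~ res_freq/Q. The short standalone check below re-declares those three functions so it runs without the repo on the path, and uses arbitrary example values (k = 1, m = 1, b = 0.1, not taken from any system above) simply to illustrate the expected relationships.

import math

def res_freq_weak_coupling(k, m, b):
    # sqrt(k/m - b^2/(2 m^2)); real only when b < sqrt(2*m*k)
    return math.sqrt(k / m - (b * b) / (2 * m * m))

def approx_Q(k, m, b):
    # quality factor of a weakly coupled (or monomer) resonator
    return math.sqrt(m * k) / b

def approx_width(k, m, b):
    # approximate Lorentzian peak width: resonance frequency divided by Q
    return res_freq_weak_coupling(k, m, b) / approx_Q(k, m, b)

k, m, b = 1.0, 1.0, 0.1   # example values; b is well below sqrt(2*m*k) ~ 1.41
print('resonant frequency:', res_freq_weak_coupling(k, m, b))  # ~0.9975
print('quality factor Q  :', approx_Q(k, m, b))                # 10.0
print('peak width        :', approx_width(k, m, b))            # ~0.0997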