diff --git a/404.html b/404.html index c71c457a5..e7f08a012 100644 --- a/404.html +++ b/404.html @@ -9,13 +9,13 @@ - - + +
Skip to main content

Page Not Found

We could not find what you were looking for.

Please contact the owner of the site that linked you to the original URL and let them know their link is broken.

- - + + \ No newline at end of file diff --git a/assets/js/08ab4e0e.7aac948a.js b/assets/js/08ab4e0e.7aac948a.js deleted file mode 100644 index a2389c347..000000000 --- a/assets/js/08ab4e0e.7aac948a.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkuc_2_docs=self.webpackChunkuc_2_docs||[]).push([[7830],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>f});var n=r(67294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var l=n.createContext({}),c=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=c(e.components);return n.createElement(l.Provider,{value:t},e.children)},d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),u=c(r),f=a,m=u["".concat(l,".").concat(f)]||u[f]||d[f]||o;return r?n.createElement(m,i(i({ref:t},p),{},{components:r})):n.createElement(m,i({ref:t},p))}));function f(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=u;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s.mdxType="string"==typeof e?e:a,i[1]=s;for(var c=2;c{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>c});var n=r(87462),a=(r(67294),r(3905));const o={id:"SoftwareTutorial",title:"HIK-Camera Software Installation"},i=void 0,s={unversionedId:"Toolboxes/DiscoveryInterferometer/SoftwareTutorial",id:"Toolboxes/DiscoveryInterferometer/SoftwareTutorial",title:"HIK-Camera Software Installation",description:"Install MVS App for Camera Utilization",source:"@site/docs/01_Toolboxes/03_DiscoveryInterferometer/Camera_Software_tutorial.md",sourceDirName:"01_Toolboxes/03_DiscoveryInterferometer",slug:"/Toolboxes/DiscoveryInterferometer/SoftwareTutorial",permalink:"/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial",draft:!1,tags:[],version:"current",frontMatter:{id:"SoftwareTutorial",title:"HIK-Camera Software Installation"},sidebar:"tutorialSidebar",previous:{title:"Tutorial",permalink:"/docs/Toolboxes/DiscoveryInterferometer/picturedTutorial"},next:{title:"Building The CourseBOX",permalink:"/docs/Toolboxes/DiscoveryDiffraction/"}},l={},c=[{value:"Install MVS App for Camera Utilization",id:"install-mvs-app-for-camera-utilization",level:3}],p={toc:c};function d(e){let{components:t,...o}=e;return(0,a.kt)("wrapper",(0,n.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h3",{id:"install-mvs-app-for-camera-utilization"},"Install MVS App for Camera Utilization"),(0,a.kt)("p",null,"Camera model: MV-CE060-10UC. Visit the HIKROBOTICS website and download the MVS software suitable for your computer. 
Below are steps exemplifying the software installation for Mac."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(61665).Z,width:"1440",height:"900"})),(0,a.kt)("p",null,"Install the downloaded file."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(42689).Z,width:"689",height:"440"})),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(10229).Z,width:"390",height:"227"})),(0,a.kt)("p",null,"Open the MVS Software."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(90149).Z,width:"723",height:"304"})),(0,a.kt)("p",null,"You should see the following window."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(1069).Z,width:"1440",height:"900"})),(0,a.kt)("p",null,"Connect the camera. Refresh the USB line to detect the camera."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(33954).Z,width:"281",height:"714"}),"\n",(0,a.kt)("img",{src:r(15956).Z,width:"283",height:"727"})),(0,a.kt)("p",null,"Select the make-link button on the detected camera."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(2847).Z,width:"1433",height:"782"})),(0,a.kt)("p",null,"The following window should be displayed."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(34327).Z,width:"1440",height:"814"})),(0,a.kt)("p",null,"Click on the play button in the actions bar of the camera."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(55602).Z,width:"1439",height:"810"})),(0,a.kt)("p",null,"If properly connected, you should see a real-time image. Adjust the exposure if the image is overexposed."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(98479).Z,width:"1440",height:"811"})),(0,a.kt)("p",null,"To adjust the exposure time, go to the Feature tree, select the Acquisition Control Category, and change the Exposure Auto option to ",(0,a.kt)("em",{parentName:"p"},"Continuous"),"."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(909).Z,width:"1439",height:"810"})),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(84843).Z,width:"1439",height:"812"})),(0,a.kt)("p",null,"Now, a clear image with good contrast should be visible."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(67045).Z,width:"1440",height:"900"})),(0,a.kt)("p",null,"To stop recording, click on the stop button in the camera's actions bar."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(82136).Z,width:"634",height:"187"})),(0,a.kt)("p",null,"To disconnect the camera, click on the break-link button next to the detected camera in the USB devices list."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(28549).Z,width:"285",height:"733"})))}d.isMDXComponent=!0},67045:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image113-6c4b77dfc45355a4ef324656dac31502.png"},84843:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image129-4f71a03d2b6a5d75585d6ce6d189223f.png"},90149:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image131-997cd80d27918c66e07555af8be23e88.png"},2847:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image136-9eb875032f69230d570ffcca5c57e7b0.png"},61665:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image140-bbc9f53af8c4c2fb982a5143789c2707.png"},55602:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image141-86cff7685fcf78cb05639435ff2e0e73.png"},909:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image142-aecd3726df5340bb77f4dfeebb3d557c.png"},28549:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image21-63c3d0d0ebc55d0e88a127a1e755edfa.png"},10229:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image25-4ae60dae3bc4f653c49e496a62ea85f4.png"},82136:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image26-7e6b077456b129a0fe37934193cfe998.png"},34327:(e,t,r)=>{r.d(t,{Z:()=>n});const 
n=r.p+"assets/images/image27-b3609cd94422dfd225deb163d1fd2bc4.png"},98479:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image32-95d76f245d22cf7b906d42438e6a4271.png"},1069:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image44-e645c97a8fdf85adc4cc8c6c66fe3806.png"},42689:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image50-90975eeebbd10cc4cbdec55d9bc94261.png"},15956:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image53-35f55be98a45eee96388a2448a025340.png"},33954:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image71-4a78fac91fcf6202ddf43ed76e67ee48.png"}}]); \ No newline at end of file diff --git a/assets/js/08ab4e0e.b0abf72d.js b/assets/js/08ab4e0e.b0abf72d.js new file mode 100644 index 000000000..f1a3aa9a6 --- /dev/null +++ b/assets/js/08ab4e0e.b0abf72d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkuc_2_docs=self.webpackChunkuc_2_docs||[]).push([[7830],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>m});var n=r(67294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var c=n.createContext({}),l=function(e){var t=n.useContext(c),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=l(e.components);return n.createElement(c.Provider,{value:t},e.children)},d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,c=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),f=l(r),m=a,u=f["".concat(c,".").concat(m)]||f[m]||d[m]||o;return r?n.createElement(u,i(i({ref:t},p),{},{components:r})):n.createElement(u,i({ref:t},p))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=f;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s.mdxType="string"==typeof e?e:a,i[1]=s;for(var l=2;l{r.r(t),r.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>l});var n=r(87462),a=(r(67294),r(3905));const o={id:"SoftwareTutorial",title:"HIK-Camera Software Installation"},i=void 0,s={unversionedId:"Toolboxes/DiscoveryInterferometer/SoftwareTutorial",id:"Toolboxes/DiscoveryInterferometer/SoftwareTutorial",title:"HIK-Camera Software Installation",description:"Install MVS App for Camera Utilization",source:"@site/docs/01_Toolboxes/03_DiscoveryInterferometer/Camera_Software_tutorial.md",sourceDirName:"01_Toolboxes/03_DiscoveryInterferometer",slug:"/Toolboxes/DiscoveryInterferometer/SoftwareTutorial",permalink:"/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial",draft:!1,tags:[],version:"current",frontMatter:{id:"SoftwareTutorial",title:"HIK-Camera Software Installation"},sidebar:"tutorialSidebar",previous:{title:"openUC2 Mach-Zender Interferometer",permalink:"/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometer"},next:{title:"Building The CourseBOX",permalink:"/docs/Toolboxes/DiscoveryDiffraction/"}},c={},l=[{value:"Install MVS App for Camera 
Utilization",id:"install-mvs-app-for-camera-utilization",level:3}],p={toc:l};function d(e){let{components:t,...o}=e;return(0,a.kt)("wrapper",(0,n.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h3",{id:"install-mvs-app-for-camera-utilization"},"Install MVS App for Camera Utilization"),(0,a.kt)("p",null,"Camera model: MV-CE060-10UC. Visit the HIKROBOTICS website and download the MVS software suitable for your computer. Below are steps exemplifying the software installation for Mac."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(61665).Z,width:"1440",height:"900"})),(0,a.kt)("p",null,"Install the downloaded file."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(42689).Z,width:"689",height:"440"})),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(10229).Z,width:"390",height:"227"})),(0,a.kt)("p",null,"Open the MVS Software."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(90149).Z,width:"723",height:"304"})),(0,a.kt)("p",null,"You should see the following window."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(1069).Z,width:"1440",height:"900"})),(0,a.kt)("p",null,"Connect the camera. Refresh the USB line to detect the camera."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(33954).Z,width:"281",height:"714"}),"\n",(0,a.kt)("img",{src:r(15956).Z,width:"283",height:"727"})),(0,a.kt)("p",null,"Select the make-link button on the detected camera."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(2847).Z,width:"1433",height:"782"})),(0,a.kt)("p",null,"The following window should be displayed."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(34327).Z,width:"1440",height:"814"})),(0,a.kt)("p",null,"Click on the play button in the actions bar of the camera."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(55602).Z,width:"1439",height:"810"})),(0,a.kt)("p",null,"If properly connected, you should see a real-time image. 
Adjust the exposure if the image is overexposed."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(98479).Z,width:"1440",height:"811"})),(0,a.kt)("p",null,"To adjust the exposure time, go to the Feature tree, select the Acquisition Control Category, and change the Exposure Auto option to ",(0,a.kt)("em",{parentName:"p"},"Continuous"),"."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(909).Z,width:"1439",height:"810"})),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(84843).Z,width:"1439",height:"812"})),(0,a.kt)("p",null,"Now, a clear image with good contrast should be visible."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(67045).Z,width:"1440",height:"900"})),(0,a.kt)("p",null,"To stop recording, click on the stop button in the camera's actions bar."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(82136).Z,width:"634",height:"187"})),(0,a.kt)("p",null,"To disconnect the camera, click on the break-link button next to the detected camera in the USB devices list."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(28549).Z,width:"285",height:"733"})))}d.isMDXComponent=!0},67045:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image113-6c4b77dfc45355a4ef324656dac31502.png"},84843:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image129-4f71a03d2b6a5d75585d6ce6d189223f.png"},90149:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image131-997cd80d27918c66e07555af8be23e88.png"},2847:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image136-9eb875032f69230d570ffcca5c57e7b0.png"},61665:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image140-bbc9f53af8c4c2fb982a5143789c2707.png"},55602:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image141-86cff7685fcf78cb05639435ff2e0e73.png"},909:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image142-aecd3726df5340bb77f4dfeebb3d557c.png"},28549:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image21-63c3d0d0ebc55d0e88a127a1e755edfa.png"},10229:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image25-4ae60dae3bc4f653c49e496a62ea85f4.png"},82136:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image26-7e6b077456b129a0fe37934193cfe998.png"},34327:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image27-b3609cd94422dfd225deb163d1fd2bc4.png"},98479:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image32-95d76f245d22cf7b906d42438e6a4271.png"},1069:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image44-e645c97a8fdf85adc4cc8c6c66fe3806.png"},42689:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image50-90975eeebbd10cc4cbdec55d9bc94261.png"},15956:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image53-35f55be98a45eee96388a2448a025340.png"},33954:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image71-4a78fac91fcf6202ddf43ed76e67ee48.png"}}]); \ No newline at end of file diff --git a/assets/js/12620db3.538c9182.js b/assets/js/12620db3.538c9182.js new file mode 100644 index 000000000..8cca2847c --- /dev/null +++ b/assets/js/12620db3.538c9182.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkuc_2_docs=self.webpackChunkuc_2_docs||[]).push([[7051],{3905:(e,t,r)=>{r.d(t,{Zo:()=>h,kt:()=>d});var n=r(67294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(a[r]=e[r]);return 
a}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var l=n.createContext({}),c=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},h=function(e){var t=c(e.components);return n.createElement(l.Provider,{value:t},e.children)},m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},p=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,i=e.originalType,l=e.parentName,h=s(e,["components","mdxType","originalType","parentName"]),p=c(r),d=a,u=p["".concat(l,".").concat(d)]||p[d]||m[d]||i;return r?n.createElement(u,o(o({ref:t},h),{},{components:r})):n.createElement(u,o({ref:t},h))}));function d(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var i=r.length,o=new Array(i);o[0]=p;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s.mdxType="string"==typeof e?e:a,o[1]=s;for(var c=2;c{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>o,default:()=>m,frontMatter:()=>i,metadata:()=>s,toc:()=>c});var n=r(87462),a=(r(67294),r(3905));const i={id:"MachZenderInterferometer",title:"openUC2 Mach-Zender Interferometer"},o=void 0,s={unversionedId:"Toolboxes/DiscoveryInterferometer/MachZenderInterferometer",id:"Toolboxes/DiscoveryInterferometer/MachZenderInterferometer",title:"openUC2 Mach-Zender Interferometer",description:"Tutorial: Mach-Zender Interferometer",source:"@site/docs/01_Toolboxes/03_DiscoveryInterferometer/04_mach-zender_interferometer.md",sourceDirName:"01_Toolboxes/03_DiscoveryInterferometer",slug:"/Toolboxes/DiscoveryInterferometer/MachZenderInterferometer",permalink:"/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometer",draft:!1,tags:[],version:"current",sidebarPosition:4,frontMatter:{id:"MachZenderInterferometer",title:"openUC2 Mach-Zender Interferometer"},sidebar:"tutorialSidebar",previous:{title:"openUC2 Michelson Interferometer",permalink:"/docs/Toolboxes/DiscoveryInterferometer/MichelsonInterferometer"},next:{title:"HIK-Camera Software Installation",permalink:"/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial"}},l={},c=[{value:"Tutorial: Mach-Zender Interferometer",id:"tutorial-mach-zender-interferometer",level:2},{value:"Materials needed:",id:"materials-needed",level:3},{value:"Instructions for assembling the Mach-Zender Interferometer:",id:"instructions-for-assembling-the-mach-zender-interferometer",level:3},{value:"First Tests with Modifications to the Original Setup",id:"first-tests-with-modifications-to-the-original-setup",level:3},{value:"Result of Phase Unwrapping",id:"result-of-phase-unwrapping",level:3}],h={toc:c};function m(e){let{components:t,...i}=e;return(0,a.kt)("wrapper",(0,n.Z)({},h,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"tutorial-mach-zender-interferometer"},"Tutorial: Mach-Zender Interferometer"),(0,a.kt)("h3",{id:"materials-needed"},"Materials needed:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Laser diode"),(0,a.kt)("li",{parentName:"ul"},"Hikrobot Camera (MV-CE060-10UC) with USB cable (",(0,a.kt)("a",{parentName:"li",href:"/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial"},"Hikrobot Camera Software installation"),")."),(0,a.kt)("li",{parentName:"ul"},"Small stage with gear."),(0,a.kt)("li",{parentName:"ul"},"Two kinematic mirrors (in cubes)."),(0,a.kt)("li",{parentName:"ul"},"Two beam splitters in cube."),(0,a.kt)("li",{parentName:"ul"},"Sample holder (in 
cube)."),(0,a.kt)("li",{parentName:"ul"},"Two empty cubes."),(0,a.kt)("li",{parentName:"ul"},"Base plates."),(0,a.kt)("li",{parentName:"ul"},"Screen."),(0,a.kt)("li",{parentName:"ul"},"Pinhole in cube."),(0,a.kt)("li",{parentName:"ul"},"Screwdriver to adjust alignment (1,5x60)"),(0,a.kt)("li",{parentName:"ul"},"Two 100 mm converging lenses.")),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(8143).Z,width:"2048",height:"921"})),(0,a.kt)("h3",{id:"instructions-for-assembling-the-mach-zender-interferometer"},"Instructions for assembling the Mach-Zender Interferometer:"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 1: Build the base plate configuration")),(0,a.kt)("p",null,"Build the base plate configuration as shown. Note: At this point the laser diode should be turned off the whole time. Don't look at the laser directly. Always use screens to look for the laser light."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(36547).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 2: Align the laser diode with the pinhole")),(0,a.kt)("p",null,"Place the laser diode, an empty cube, and a 100 mm convergent lens in a straight line. Then, place the pinhole two cube units from the lens and place the screen after the pinhole. Turn the laser on and align it by using the screws to center the beam with the pinhole."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(92826).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 3: Check beam collimation")),(0,a.kt)("p",null,"Check if the beam is collimated by placing the screen at different distances. The beam diameter should stay relatively the same size. If it is not the same size, this means that the distance between the laser and the lens should be adjusted. Turn the laser off."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(79319).Z,width:"2048",height:"921"}),"\n",(0,a.kt)("img",{src:r(71057).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 4: Set up the beam splitter and mirror")),(0,a.kt)("p",null,"Place the beam splitter and the kinematic mirror as shown. Place the pinhole two cube units away from the mirror and the screen behind it. Turn the laser on and align the kinematic mirror using the screws. Once it's done, turn the laser off."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(47536).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 5: Adjust the microscope objective and lens")),(0,a.kt)("p",null,"Place the microscope objective, followed by an empty cube and the 100 mm lens. You should adjust the distance between the objective and the 100 mm lens so that the beam is collimated after going through both. Place the screen after the lens. Turn the laser on and check the collimation. Adjust the distance as necessary. Turn the laser off."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(41455).Z,width:"2048",height:"921"}),"\n",(0,a.kt)("img",{src:r(58692).Z,width:"2048",height:"921"}),"\n",(0,a.kt)("img",{src:r(92290).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 6: Setup and alignment")),(0,a.kt)("p",null,"Place the camera on the sample arm as shown. Put the screen on the other arm exit. 
Place the sample holder using one half of the cube at a time to avoid colliding with the microscope objective."),(0,a.kt)("p",null,"Turn the laser on and use the screen to align both beams using the screws on the reference mirror."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(94246).Z,width:"2048",height:"921"}),"\n",(0,a.kt)("img",{src:r(92299).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 7: Connect and adjust in the MVS app")),(0,a.kt)("p",null,"Connect the camera to the computer and open the MVS app. Block the reference beam. Move the cover slide such that your sample enters the FoV (Field of View). Unblock the reference beam. Zoom into the image to distinguish the fringe pattern in the MVS camera display. Adjust the angles of the reference mirror using the screws to change the fringe pattern as shown."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(70089).Z,width:"1440",height:"900"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 8: Data processing")),(0,a.kt)("p",null,"Process the data. Phase unwrapping is possible."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(36625).Z,width:"1440",height:"648"})),(0,a.kt)("h3",{id:"first-tests-with-modifications-to-the-original-setup"},"First Tests with Modifications to the Original Setup"),(0,a.kt)("p",null,"Using Lei's code, the need for a linear stage for the sample was identified. Adjusting the objective and tube lens enhances the interference, making it crucial to use the ImSwitch interface to see the FFT in real time and optimize it. The final goal is to move the position of the first-order interference term so that Lei's algorithm (or another phase-unwrapping algorithm) can be used to retrieve the phase. To achieve this, two images need to be acquired: a sample image and a background image (without a cover slide, or a slide region with no specimen)."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(62832).Z,width:"2048",height:"1365"})),(0,a.kt)("h3",{id:"result-of-phase-unwrapping"},"Result of Phase Unwrapping"),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(18299).Z,width:"753",height:"536"})))}m.isMDXComponent=!0},92826:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image101-78dcb7aadbffeba203b5242c46d7538e.jpg"},8143:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image111-3c629a36465765d21c97c77cb1c5d92b.jpg"},79319:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image112-6cb6ee23f062549655427d7e87daea68.jpg"},92299:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image116-494b16efef940489bf2b42e6deab22f1.jpg"},71057:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image124-a6c097fe702b4781487a9a3273a53322.jpg"},47536:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image132-64336325ef2d82c30ecbcf8c3bed3f2c.jpg"},62832:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image133-420566b56fcd11332e7d9fa2f8d9e8fd.png"},41455:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image137-0387100181ab1222243b254c83f43ed7.jpg"},70089:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image147-66e4bb8cf4a7404038d92c95adfef775.png"},92290:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image17-ec3ad40dbdcb8f4f2cfd55bb7e0cea87.jpg"},18299:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image72-2ba90b21dcac4f92fb565a3f864fb5c7.png"},36547:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image78-a6fdea62b40074e7fe1ba2ed2fb1fd32.jpg"},58692:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image79-2700bacd0bc88f2c2dace4110cadf25c.jpg"},94246:(e,t,r)=>{r.d(t,{Z:()=>n});const 
n=r.p+"assets/images/image85-ab034dbc945b60fefad88f640db95946.jpg"},36625:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image99-9727c9447c025b29ec98c59d54802ead.png"}}]); \ No newline at end of file diff --git a/assets/js/12620db3.b657dd70.js b/assets/js/12620db3.b657dd70.js deleted file mode 100644 index 897b62a57..000000000 --- a/assets/js/12620db3.b657dd70.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkuc_2_docs=self.webpackChunkuc_2_docs||[]).push([[7051],{3905:(e,t,r)=>{r.d(t,{Zo:()=>h,kt:()=>m});var n=r(67294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var l=n.createContext({}),c=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},h=function(e){var t=c(e.components);return n.createElement(l.Provider,{value:t},e.children)},p={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,i=e.originalType,l=e.parentName,h=s(e,["components","mdxType","originalType","parentName"]),d=c(r),m=a,u=d["".concat(l,".").concat(m)]||d[m]||p[m]||i;return r?n.createElement(u,o(o({ref:t},h),{},{components:r})):n.createElement(u,o({ref:t},h))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var i=r.length,o=new Array(i);o[0]=d;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s.mdxType="string"==typeof e?e:a,o[1]=s;for(var c=2;c{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>o,default:()=>p,frontMatter:()=>i,metadata:()=>s,toc:()=>c});var n=r(87462),a=(r(67294),r(3905));const i={id:"picturedTutorial",title:"Tutorial"},o=void 0,s={unversionedId:"Toolboxes/DiscoveryInterferometer/picturedTutorial",id:"Toolboxes/DiscoveryInterferometer/picturedTutorial",title:"Tutorial",description:"Tutorial: Mach-Zender Interferometer",source:"@site/docs/01_Toolboxes/03_DiscoveryInterferometer/04_mach-zender_interferometer.md",sourceDirName:"01_Toolboxes/03_DiscoveryInterferometer",slug:"/Toolboxes/DiscoveryInterferometer/picturedTutorial",permalink:"/docs/Toolboxes/DiscoveryInterferometer/picturedTutorial",draft:!1,tags:[],version:"current",sidebarPosition:4,frontMatter:{id:"picturedTutorial",title:"Tutorial"},sidebar:"tutorialSidebar",previous:{title:"openUC2 Michelson Interferometer",permalink:"/docs/Toolboxes/DiscoveryInterferometer/MichelsonInterferometer"},next:{title:"HIK-Camera Software Installation",permalink:"/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial"}},l={},c=[{value:"Tutorial: Mach-Zender Interferometer",id:"tutorial-mach-zender-interferometer",level:2},{value:"Materials needed:",id:"materials-needed",level:3},{value:"Instructions for assembling the Mach-Zender Interferometer:",id:"instructions-for-assembling-the-mach-zender-interferometer",level:3},{value:"First Tests with Modifications to the Original Setup",id:"first-tests-with-modifications-to-the-original-setup",level:3},{value:"Result of Phase Unwrapping",id:"result-of-phase-unwrapping",level:3}],h={toc:c};function 
p(e){let{components:t,...i}=e;return(0,a.kt)("wrapper",(0,n.Z)({},h,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"tutorial-mach-zender-interferometer"},"Tutorial: Mach-Zender Interferometer"),(0,a.kt)("h3",{id:"materials-needed"},"Materials needed:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Laser diode"),(0,a.kt)("li",{parentName:"ul"},"Hikrobot Camera (MV-CE060-10UC) with USB cable (",(0,a.kt)("a",{parentName:"li",href:"/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial"},"Hikrobot Camera Software installation"),")."),(0,a.kt)("li",{parentName:"ul"},"Small stage with gear."),(0,a.kt)("li",{parentName:"ul"},"Two kinematic mirrors (in cubes)."),(0,a.kt)("li",{parentName:"ul"},"Two beam splitters in cube."),(0,a.kt)("li",{parentName:"ul"},"Sample holder (in cube)."),(0,a.kt)("li",{parentName:"ul"},"Two empty cubes."),(0,a.kt)("li",{parentName:"ul"},"Base plates."),(0,a.kt)("li",{parentName:"ul"},"Screen."),(0,a.kt)("li",{parentName:"ul"},"Pinhole in cube."),(0,a.kt)("li",{parentName:"ul"},"Screwdriver to adjust alignment (1,5x60)"),(0,a.kt)("li",{parentName:"ul"},"Two 100 mm converging lenses.")),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(8143).Z,width:"2048",height:"921"})),(0,a.kt)("h3",{id:"instructions-for-assembling-the-mach-zender-interferometer"},"Instructions for assembling the Mach-Zender Interferometer:"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 1: Build the base plate configuration")),(0,a.kt)("p",null,"Build the base plate configuration as shown. Note: At this point the laser diode should be turned off the whole time. Don't look at the laser directly. Always use screens to look for the laser light."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(36547).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 2: Align the laser diode with the pinhole")),(0,a.kt)("p",null,"Place the laser diode, an empty cube, and a 100 mm convergent lens in a straight line. Then, place the pinhole two cube units from the lens and place the screen after the pinhole. Turn the laser on and align it using by using the screws to center the beam with the pinhole."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(92826).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 3: Check beam collimation")),(0,a.kt)("p",null,"Check if the beam is collimated by placing the screen at different distances. The beam diameter should stay relatively the same size. If it is not the same size, this means that the distance between the laser and the lens should be adjusted. Turn the laser off."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(79319).Z,width:"2048",height:"921"}),"\n",(0,a.kt)("img",{src:r(71057).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 4: Set up the beam splitter and mirror")),(0,a.kt)("p",null,"Place the beam splitter and the kinematic mirror as shown. Place the pinhole two cube units away from the mirror and the screen behind it. Turn the laser on and align the kinematic mirror using the screws. Once it's done, turn the laser off."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(47536).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 5: Adjust the microscope objective and lens")),(0,a.kt)("p",null,"Place the microscope objective, followed by an empty cube and the 100 mm lens. You should adjust the distance between the objective and the 100 mm lens so that the beam is collimated after going through both. 
Place the screen after the lens. Turn the laser on and check the collimation. Adjust the distance as necessary. Turn the laser off."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(41455).Z,width:"2048",height:"921"}),"\n",(0,a.kt)("img",{src:r(58692).Z,width:"2048",height:"921"}),"\n",(0,a.kt)("img",{src:r(92290).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 6: Setup and alignment")),(0,a.kt)("p",null,"Place the camera on the sample arm as shown. Put the screen on the other arm exit. Place the sample holder using one half of the cube at a time to not collide with the microscope objective."),(0,a.kt)("p",null,"Turn the laser on and use the screen to align both beams using the screws on the reference mirror."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(94246).Z,width:"2048",height:"921"}),"\n",(0,a.kt)("img",{src:r(92299).Z,width:"2048",height:"921"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 7: Connect and adjust in the MVS app")),(0,a.kt)("p",null,"Connect the camera to the computer and open the MVS app. Block the reference beam. Move the coverslide such that your sample enters the FoV (Field of View). Unblock the reference beam. Zoom into the image to distinguish the fringe pattern in the MVS camera display. Adjust the angles of the reference mirror using the screws to change the fringe pattern as shown."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(70089).Z,width:"1440",height:"900"})),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"Step 7: Data processing")),(0,a.kt)("p",null,"Process the data. Phase unwrapping possible."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(36625).Z,width:"1440",height:"648"})),(0,a.kt)("h3",{id:"first-tests-with-modifications-to-the-original-setup"},"First Tests with Modifications to the Original Setup"),(0,a.kt)("p",null,"Using Lei code, the need of a linear stage for the sample was identified. Adjusting the objective and tube lens enhances the interference, making it crucial to use the ImSwitch interface to see the FFT in real time and optimize. The final goal is to move the position of the first order interference to use Lei algorithm (or some Phase unwrapping algorithm) to retrieve the Phase. 
To achieve this, two images need to be acquired: a sample image and a background image (without a cover slide or a slide region with no specimen)."),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(62832).Z,width:"2048",height:"1365"})),(0,a.kt)("h3",{id:"result-of-phase-unwrapping"},"Result of Phase Unwrapping"),(0,a.kt)("p",null,(0,a.kt)("img",{src:r(18299).Z,width:"753",height:"536"})))}p.isMDXComponent=!0},92826:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image101-78dcb7aadbffeba203b5242c46d7538e.jpg"},8143:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image111-3c629a36465765d21c97c77cb1c5d92b.jpg"},79319:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image112-6cb6ee23f062549655427d7e87daea68.jpg"},92299:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image116-494b16efef940489bf2b42e6deab22f1.jpg"},71057:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image124-a6c097fe702b4781487a9a3273a53322.jpg"},47536:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image132-64336325ef2d82c30ecbcf8c3bed3f2c.jpg"},62832:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image133-420566b56fcd11332e7d9fa2f8d9e8fd.png"},41455:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image137-0387100181ab1222243b254c83f43ed7.jpg"},70089:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image147-66e4bb8cf4a7404038d92c95adfef775.png"},92290:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image17-ec3ad40dbdcb8f4f2cfd55bb7e0cea87.jpg"},18299:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image72-2ba90b21dcac4f92fb565a3f864fb5c7.png"},36547:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image78-a6fdea62b40074e7fe1ba2ed2fb1fd32.jpg"},58692:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image79-2700bacd0bc88f2c2dace4110cadf25c.jpg"},94246:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image85-ab034dbc945b60fefad88f640db95946.jpg"},36625:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/image99-9727c9447c025b29ec98c59d54802ead.png"}}]); \ No newline at end of file diff --git a/assets/js/935f2afb.c658e6ed.js b/assets/js/935f2afb.c658e6ed.js new file mode 100644 index 000000000..9e1a7c54e --- /dev/null +++ b/assets/js/935f2afb.c658e6ed.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkuc_2_docs=self.webpackChunkuc_2_docs||[]).push([[53],{1109:e=>{e.exports=JSON.parse('{"pluginId":"default","version":"current","label":"Next","banner":null,"badge":false,"noIndex":false,"className":"docs-version-current","isLast":true,"docsSidebars":{"tutorialSidebar":[{"type":"category","label":"Educational Kits","collapsible":true,"collapsed":true,"items":[{"type":"category","label":"DiscoveryCore","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Optics and Imaging for Medical Photonics Students","href":"/docs/Toolboxes/DiscoveryCore/Opticsintro","docId":"Toolboxes/DiscoveryCore/Opticsintro"},{"type":"link","label":"openUC2 Smartphone Microscope with a finite corrected objective lens","href":"/docs/Toolboxes/DiscoveryCore/Smartphone Microscope","docId":"Toolboxes/DiscoveryCore/Smartphone Microscope"},{"type":"category","label":"CHINESE","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"uc2miniboxCN","href":"/docs/Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCN","docId":"Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCN"}]},{"type":"category","label":"ENGLISH","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"UC2 miniBOX 
(english)","href":"/docs/Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxEN","docId":"Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxEN"},{"type":"link","label":"Lens","href":"/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreLens","docId":"Toolboxes/DiscoveryCore/ENGLISH/CoreLens"},{"type":"link","label":"Telescope","href":"/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreTelescope","docId":"Toolboxes/DiscoveryCore/ENGLISH/CoreTelescope"},{"type":"link","label":"Microscope","href":"/docs/Toolboxes/DiscoveryCore/ENGLISH/coreMicroscope","docId":"Toolboxes/DiscoveryCore/ENGLISH/coreMicroscope"},{"type":"link","label":"Tutorial","href":"/docs/Toolboxes/DiscoveryCore/ENGLISH/picturedTutorial","docId":"Toolboxes/DiscoveryCore/ENGLISH/picturedTutorial"}]},{"type":"category","label":"GERMAN","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Introduction","href":"/docs/Toolboxes/DiscoveryCore/GERMAN/CoreIntro","docId":"Toolboxes/DiscoveryCore/GERMAN/CoreIntro"},{"type":"link","label":"Linse","href":"/docs/Toolboxes/DiscoveryCore/GERMAN/CoreLinse","docId":"Toolboxes/DiscoveryCore/GERMAN/CoreLinse"},{"type":"link","label":"Teleskop","href":"/docs/Toolboxes/DiscoveryCore/GERMAN/CoreTeleskop","docId":"Toolboxes/DiscoveryCore/GERMAN/CoreTeleskop"},{"type":"link","label":"Mikroskop","href":"/docs/Toolboxes/DiscoveryCore/GERMAN/CoreMikroskop","docId":"Toolboxes/DiscoveryCore/GERMAN/CoreMikroskop"}]},{"type":"category","label":"SPANISH","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"core_intro","href":"/docs/Toolboxes/DiscoveryCore/SPANISH/core_intro","docId":"Toolboxes/DiscoveryCore/SPANISH/core_intro"}]}]},{"type":"category","label":"DiscoveryElectronics","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"openUC2 Electronics kit that enables automation","href":"/docs/Toolboxes/DiscoveryElectronics/Automation_intro","docId":"Toolboxes/DiscoveryElectronics/Automation_intro"},{"type":"link","label":"openUC2 Camera Setup","href":"/docs/Toolboxes/DiscoveryElectronics/Camera Setup","docId":"Toolboxes/DiscoveryElectronics/Camera Setup"},{"type":"link","label":"XYZ Micrometer Stage for Precise Motion Control","href":"/docs/Toolboxes/DiscoveryElectronics/XYZ_stage_mico","docId":"Toolboxes/DiscoveryElectronics/XYZ_stage_mico"},{"type":"link","label":"ESP32 XIAO Sense-based microscope","href":"/docs/Toolboxes/DiscoveryElectronics/seeedmicroscope","docId":"Toolboxes/DiscoveryElectronics/seeedmicroscope"},{"type":"link","label":"openUC2 *Spectrometer*","href":"/docs/Toolboxes/DiscoveryElectronics/spectrometer","docId":"Toolboxes/DiscoveryElectronics/spectrometer"}]},{"type":"category","label":"DiscoveryInterferometer","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"openUC2 Interferometer Introduction","href":"/docs/Toolboxes/DiscoveryInterferometer/Interferometer_intro","docId":"Toolboxes/DiscoveryInterferometer/Interferometer_intro"},{"type":"link","label":"openUC2 In-line holography","href":"/docs/Toolboxes/DiscoveryInterferometer/InlineHolography","docId":"Toolboxes/DiscoveryInterferometer/InlineHolography"},{"type":"link","label":"openUC2 Michelson Interferometer","href":"/docs/Toolboxes/DiscoveryInterferometer/MichelsonInterferometer","docId":"Toolboxes/DiscoveryInterferometer/MichelsonInterferometer"},{"type":"link","label":"openUC2 Mach-Zender Interferometer","href":"/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometer","docId":"Toolboxes/DiscoveryInterferometer/MachZenderInterferometer"},{"type":"link","label":"HIK-Camera Software 
Installation","href":"/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial","docId":"Toolboxes/DiscoveryInterferometer/SoftwareTutorial"}]},{"type":"category","label":"Building The CourseBOX","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"The Course BOX Alignment Procedure (Finite Optics)","href":"/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/","docId":"Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/Readme"},{"type":"link","label":"CourseBOX: Light Microscopy and Optical Alignment (Infinity Optics)","href":"/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_InfOptics/","docId":"Toolboxes/DiscoveryDiffraction/ALIGNMENT_InfOptics/Readme"},{"type":"link","label":"MicroscopyCore","href":"/docs/Toolboxes/DiscoveryDiffraction/MicroscopyCore","docId":"Toolboxes/DiscoveryDiffraction/MicroscopyCore"}],"href":"/docs/Toolboxes/DiscoveryDiffraction/"},{"type":"category","label":"Polarisation Experiments","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Brewster Angle Experiment","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Brewster_Angle_Experiment/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Brewster_Angle_Experiment/Readme"},{"type":"link","label":"Circular Polarizer","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Circular_Polarizer/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Circular_Polarizer/Readme"},{"type":"link","label":"Crossed Polarizers","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Crossed_Polarizers/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Crossed_Polarizers/Readme"},{"type":"link","label":"Many Microscope Slides Experiment","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Many_Microscope_Slides_Experiment/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Many_Microscope_Slides_Experiment/Readme"},{"type":"link","label":"Newton\'s Rings Experiment","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Newtons_Rings_Experiment/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Newtons_Rings_Experiment/Readme"},{"type":"link","label":"Polarization Experiment using Optically Active Solution","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Polarization_using_optically_active_solution/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Polarization_using_optically_active_solution/Readme"},{"type":"link","label":"Stress Birefringence Experiment","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Stress_Birefringence/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Stress_Birefringence/Readme"},{"type":"link","label":"Three Polarizers (0, 45, 90 degrees)","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Three_Polarizers/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Three_Polarizers/Readme"}],"href":"/docs/Toolboxes/DiscoveryPolarization/"},{"type":"link","label":"Fluorescence Extension","href":"/docs/Toolboxes/DiscoveryFluorescence/","docId":"Toolboxes/DiscoveryFluorescence/README"}],"href":"/docs/Toolboxes/"},{"type":"category","label":"Investigator","collapsible":true,"collapsed":true,"items":[{"type":"category","label":"ZMicroscope","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Unpack the openUC2 Z-Microscope","href":"/docs/Investigator/ZMicroscope/UpackZMicroscope","docId":"Investigator/ZMicroscope/UpackZMicroscope"}]},{"type":"category","label":"XYZMicroscope","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Aligning the Beamsplitter 
Cube","href":"/docs/Investigator/XYZMicroscope/AlignLaser","docId":"Investigator/XYZMicroscope/AlignLaser"},{"type":"link","label":"openUC2 FiveD v1","href":"/docs/Investigator/XYZMicroscope/FiveD_v1","docId":"Investigator/XYZMicroscope/FiveD_v1"},{"type":"link","label":"openUC2 FiveD v2","href":"/docs/Investigator/XYZMicroscope/FiveD_v2","docId":"Investigator/XYZMicroscope/FiveD_v2"},{"type":"link","label":"openUC2 FiveD v3","href":"/docs/Investigator/XYZMicroscope/FiveD_v3","docId":"Investigator/XYZMicroscope/FiveD_v3"},{"type":"link","label":"Histo Scanner Plugin Documentation","href":"/docs/Investigator/XYZMicroscope/HistoScan","docId":"Investigator/XYZMicroscope/HistoScan"},{"type":"link","label":"MCT (Multi-Colour Timelapse) Imaging Plugin","href":"/docs/Investigator/XYZMicroscope/MCTPlugin","docId":"Investigator/XYZMicroscope/MCTPlugin"},{"type":"link","label":"ROI Scanner","href":"/docs/Investigator/XYZMicroscope/ROIScanner","docId":"Investigator/XYZMicroscope/ROIScanner"},{"type":"link","label":"openUC2 Phase-Contrast Setup Tutorial","href":"/docs/Investigator/XYZMicroscope/SetupPhasecontrast","docId":"Investigator/XYZMicroscope/SetupPhasecontrast"},{"type":"link","label":"openUC2 Setting up the tube lens","href":"/docs/Investigator/XYZMicroscope/SetupTubelens","docId":"Investigator/XYZMicroscope/SetupTubelens"}]},{"type":"category","label":"Lightsheet","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"openUC2 Light-Sheet Microscope","href":"/docs/Investigator/Lightsheet/LightSheet","docId":"Investigator/Lightsheet/LightSheet"},{"type":"link","label":"openUC2 Light-Sheet Microscope (Old Version)","href":"/docs/Investigator/Lightsheet/LightSheetOld","docId":"Investigator/Lightsheet/LightSheetOld"},{"type":"link","label":"openUC2 Light-Sheet Tips and Tricks","href":"/docs/Investigator/Lightsheet/LightSheet Sample","docId":"Investigator/Lightsheet/LightSheet Sample"},{"type":"link","label":"Light-sheet alignment","href":"/docs/Investigator/Lightsheet/LightsheetCalibration","docId":"Investigator/Lightsheet/LightsheetCalibration"}]},{"type":"category","label":"STORM","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"U.C.*STORM*","href":"/docs/Investigator/STORM/Main","docId":"Investigator/STORM/Main"},{"type":"link","label":"Setting up the laser","href":"/docs/Investigator/STORM/Illumination","docId":"Investigator/STORM/Illumination"},{"type":"link","label":"Stability","href":"/docs/Investigator/STORM/Stability","docId":"Investigator/STORM/Stability"},{"type":"link","label":"Software","href":"/docs/Investigator/STORM/Software","docId":"Investigator/STORM/Software"},{"type":"link","label":"Electronics","href":"/docs/Investigator/STORM/Electronics","docId":"Investigator/STORM/Electronics"},{"type":"link","label":"Results","href":"/docs/Investigator/STORM/Results","docId":"Investigator/STORM/Results"}]}]},{"type":"category","label":"Electronics","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Introduction","href":"/docs/Electronics/uc2e1","docId":"Electronics/uc2e1"},{"type":"link","label":"Hardware","href":"/docs/Electronics/uc2e2","docId":"Electronics/uc2e2"},{"type":"link","label":"Getting Started","href":"/docs/Electronics/uc2e3","docId":"Electronics/uc2e3"},{"type":"link","label":"REST principle","href":"/docs/Electronics/uc2e5","docId":"Electronics/uc2e5"},{"type":"link","label":"REST commands","href":"/docs/Electronics/uc2e5.1","docId":"Electronics/uc2e5.1"},{"type":"link","label":"Connecting 
devices","href":"/docs/Electronics/uc2e6","docId":"Electronics/uc2e6"},{"type":"link","label":"Controlling the UC2e","href":"/docs/Electronics/uc2e7","docId":"Electronics/uc2e7"},{"type":"link","label":"Compiling from Scratch","href":"/docs/Electronics/uc2e8","docId":"Electronics/uc2e8"},{"type":"link","label":"Replace Hardware","href":"/docs/Electronics/uc2e9","docId":"Electronics/uc2e9"},{"type":"link","label":"PS4-Controller","href":"/docs/Electronics/PS4-Controller","docId":"Electronics/PS4-Controller"},{"type":"link","label":"Python commands","href":"/docs/Electronics/uc2e5.2","docId":"Electronics/uc2e5.2"},{"type":"category","label":"APIDescription","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"UC2-ESP","href":"/docs/Electronics/APIDescription/INTRO","docId":"Electronics/APIDescription/INTRO"},{"type":"link","label":"AS 5311 linear encoder for real-time feedback loop","href":"/docs/Electronics/APIDescription/Encoder","docId":"Electronics/APIDescription/Encoder"},{"type":"link","label":"Home","href":"/docs/Electronics/APIDescription/Home","docId":"Electronics/APIDescription/Home"},{"type":"link","label":"LED array","href":"/docs/Electronics/APIDescription/LEDArray","docId":"Electronics/APIDescription/LEDArray"},{"type":"link","label":"Motor","href":"/docs/Electronics/APIDescription/Motor","docId":"Electronics/APIDescription/Motor"},{"type":"link","label":"PinConfig","href":"/docs/Electronics/APIDescription/PinConfig","docId":"Electronics/APIDescription/PinConfig"}]},{"type":"category","label":"UC2-REST","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"UC2-REST","href":"/docs/Electronics/UC2-REST/INTRO","docId":"Electronics/UC2-REST/INTRO"},{"type":"link","label":"UC2-REST: Messaging","href":"/docs/Electronics/UC2-REST/ESP32_Messaging_Callback","docId":"Electronics/UC2-REST/ESP32_Messaging_Callback"},{"type":"link","label":"UC2-REST: Motor","href":"/docs/Electronics/UC2-REST/ESP32_Motor","docId":"Electronics/UC2-REST/ESP32_Motor"}]}]},{"type":"category","label":"ImSwitch","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Install driver for Daheng Camera","href":"/docs/ImSwitch/DahengCamera","docId":"ImSwitch/DahengCamera"},{"type":"link","label":"ImSwitchClient Documentation","href":"/docs/ImSwitch/ImSwitchClient","docId":"ImSwitch/ImSwitchClient"},{"type":"link","label":"ImSwitchConfig","href":"/docs/ImSwitch/ImSwitchConfig","docId":"ImSwitch/ImSwitchConfig"},{"type":"link","label":"Install ImSwitch","href":"/docs/ImSwitch/ImSwitchInstall","docId":"ImSwitch/ImSwitchInstall"},{"type":"link","label":"ImSwitchInstallUbuntu","href":"/docs/ImSwitch/ImSwitchInstallUbuntu","docId":"ImSwitch/ImSwitchInstallUbuntu"},{"type":"link","label":"ImSwitchInstallWindows","href":"/docs/ImSwitch/ImSwitchInstallWindows","docId":"ImSwitch/ImSwitchInstallWindows"},{"type":"link","label":"Install ImSwitch using the ImSwitch Installer","href":"/docs/ImSwitch/ImSwitchInstaller","docId":"ImSwitch/ImSwitchInstaller"},{"type":"link","label":"ImSwitchUpdate","href":"/docs/ImSwitch/ImSwitchUpdate","docId":"ImSwitch/ImSwitchUpdate"}]},{"type":"category","label":"openUC2 Workshops","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"openUC2 Workshop at BioRTC in Nigeria","href":"/docs/WORKSHOPS/Workshop Nigeria","docId":"WORKSHOPS/Workshop 
Nigeria"}],"href":"/docs/WORKSHOPS/"},{"type":"category","label":"PRODUCTION","collapsible":true,"collapsed":true,"items":[{"type":"category","label":"INVESTIGATOR","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Assemble the XYZ Microscope","href":"/docs/PRODUCTION/INVESTIGATOR/ProductionXYZMicroscope","docId":"PRODUCTION/INVESTIGATOR/ProductionXYZMicroscope"}]},{"type":"category","label":"Modules","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"USB Camera","href":"/docs/PRODUCTION/Modules/Camera","docId":"PRODUCTION/Modules/Camera"},{"type":"link","label":"Eyepiece","href":"/docs/PRODUCTION/Modules/Eyepiece","docId":"PRODUCTION/Modules/Eyepiece"},{"type":"link","label":"LENS","href":"/docs/PRODUCTION/Modules/LENS","docId":"PRODUCTION/Modules/LENS"},{"type":"link","label":"KINEMATIC MIRROR (45\xb0)","href":"/docs/PRODUCTION/Modules/KIN_MIR_45","docId":"PRODUCTION/Modules/KIN_MIR_45"},{"type":"link","label":"KINEMATIC MIRROR (90\xb0)","href":"/docs/PRODUCTION/Modules/KIN_MIR_90","docId":"PRODUCTION/Modules/KIN_MIR_90"},{"type":"link","label":"KINEMATIC MIRROR (90\xb0)","href":"/docs/PRODUCTION/Modules/BEAMSPLITTER","docId":"PRODUCTION/Modules/BEAMSPLITTER"},{"type":"link","label":"Mirror (45\xb0)","href":"/docs/PRODUCTION/Modules/MIR_45","docId":"PRODUCTION/Modules/MIR_45"},{"type":"link","label":"Kinematic XY Mount","href":"/docs/PRODUCTION/Modules/KIN_XY_LASER","docId":"PRODUCTION/Modules/KIN_XY_LASER"},{"type":"link","label":"Kinematic XY Mount / Laser","href":"/docs/PRODUCTION/Modules/STAGE_Z_MANUAL","docId":"PRODUCTION/Modules/STAGE_Z_MANUAL"},{"type":"link","label":"Z-Stage Motorized NEMA12 25mm","href":"/docs/PRODUCTION/Modules/STAGE_Z_NEMA","docId":"PRODUCTION/Modules/STAGE_Z_NEMA"},{"type":"link","label":"Torch","href":"/docs/PRODUCTION/Modules/TORCH","docId":"PRODUCTION/Modules/TORCH"},{"type":"link","label":"Sample Holder","href":"/docs/PRODUCTION/Modules/SAMPLE_HOLDEr","docId":"PRODUCTION/Modules/SAMPLE_HOLDEr"},{"type":"link","label":"Polarization Rotator","href":"/docs/PRODUCTION/Modules/POLARIZER_ROTATING","docId":"PRODUCTION/Modules/POLARIZER_ROTATING"},{"type":"link","label":"Apertures","href":"/docs/PRODUCTION/Modules/APERTURES","docId":"PRODUCTION/Modules/APERTURES"}]}]},{"type":"link","label":"openUC2 Documentation","href":"/docs/intro","docId":"intro"}]},"docs":{"Electronics/APIDescription/Encoder":{"id":"Electronics/APIDescription/Encoder","title":"AS 5311 linear encoder for real-time feedback loop","description":"The relevant code can be found here:","sidebar":"tutorialSidebar"},"Electronics/APIDescription/Home":{"id":"Electronics/APIDescription/Home","title":"Home","description":"UC2-ESP Motor Homing Interface API Description","sidebar":"tutorialSidebar"},"Electronics/APIDescription/INTRO":{"id":"Electronics/APIDescription/INTRO","title":"UC2-ESP","description":"This is the API description for the UC2 firmware running on the ESP32 boards. It\'s under heavy active development. You can find the current version here:","sidebar":"tutorialSidebar"},"Electronics/APIDescription/LEDArray":{"id":"Electronics/APIDescription/LEDArray","title":"LED array","description":"This API provides a convenient method for controlling individual LEDs within a NeoPixel LED array using the UC2-ESP firmware. The interface facilitates the manipulation of LED colors and array display modes. 
It operates through JSON documents sent over USB serial communication.","sidebar":"tutorialSidebar"},"Electronics/APIDescription/Motor":{"id":"Electronics/APIDescription/Motor","title":"Motor","description":"This API provides a straightforward way to control and manage motors using the UC2-ESP firmware. The interface operates over USB serial communication and accepts JSON documents to control motor movements. The main endpoint for motor control is /motor_act.","sidebar":"tutorialSidebar"},"Electronics/APIDescription/PinConfig":{"id":"Electronics/APIDescription/PinConfig","title":"PinConfig","description":"UC2 System Version 2 and 3, and WEMOS Board Pinout Description","sidebar":"tutorialSidebar"},"Electronics/PS4-Controller":{"id":"Electronics/PS4-Controller","title":"PS4-Controller","description":"If you are using the webserial online flashing tool provided by UC2 (https://youseetoo.github.io/) to flash the firmware onto your ESP8266 or ESP32 development board, the process of connecting the PS4 controller to the UC2-ESP remains similar to the steps mentioned earlier. However, please note that the flashing tool is a separate tool for uploading firmware, and the Bluetooth communication with the PS4 controller needs to be implemented in your firmware code.","sidebar":"tutorialSidebar"},"Electronics/UC2-REST/ESP32_Messaging_Callback":{"id":"Electronics/UC2-REST/ESP32_Messaging_Callback","title":"UC2-REST: Messaging","description":"This documentation covers the callback functionality integrated into the firmware, particularly focusing on the Message class. The Message class is designed to parse incoming messages from the ESP32, facilitating the conversion of hardware inputs and other events into software actions.","sidebar":"tutorialSidebar"},"Electronics/UC2-REST/ESP32_Motor":{"id":"Electronics/UC2-REST/ESP32_Motor","title":"UC2-REST: Motor","description":"This section provides detailed documentation on the Motor class designed for controlling motors via the firmware. The Motor class includes functionality for motor movement, triggering, position tracking, and stage scanning, among other features.","sidebar":"tutorialSidebar"},"Electronics/UC2-REST/INTRO":{"id":"Electronics/UC2-REST/INTRO","title":"UC2-REST","description":"This explains the basic functionality of the UC2-REST Python interface in conjunction with the UC2-ESP32 mainboard. 
This was mostly designed to interface with ImSwitch.","sidebar":"tutorialSidebar"},"Electronics/uc2e1":{"id":"Electronics/uc2e1","title":"Introduction","description":"Overview","sidebar":"tutorialSidebar"},"Electronics/uc2e2":{"id":"Electronics/uc2e2","title":"Hardware","description":"\ud83d\udd0c Board layout and schematics","sidebar":"tutorialSidebar"},"Electronics/uc2e3":{"id":"Electronics/uc2e3","title":"Getting Started","description":"First Steps, Getting Started, Flashing - Simply Quick Start!","sidebar":"tutorialSidebar"},"Electronics/uc2e5":{"id":"Electronics/uc2e5","title":"REST principle","description":"Introduction into the ESP32 microcontroller firmware","sidebar":"tutorialSidebar"},"Electronics/uc2e5.1":{"id":"Electronics/uc2e5.1","title":"REST commands","description":"Controlling hardware using the WebSerial Standard","sidebar":"tutorialSidebar"},"Electronics/uc2e5.2":{"id":"Electronics/uc2e5.2","title":"Python commands","description":"Using UC2-REST in Python","sidebar":"tutorialSidebar"},"Electronics/uc2e6":{"id":"Electronics/uc2e6","title":"Connecting devices","description":"Connect devices","sidebar":"tutorialSidebar"},"Electronics/uc2e7":{"id":"Electronics/uc2e7","title":"Controlling the UC2e","description":"Controlling the ESP32","sidebar":"tutorialSidebar"},"Electronics/uc2e8":{"id":"Electronics/uc2e8","title":"Compiling from Scratch","description":"UC2-ESP Firmware for the openUC2 UC2e electronics","sidebar":"tutorialSidebar"},"Electronics/uc2e9":{"id":"Electronics/uc2e9","title":"Replace Hardware","description":"\u274c Replacing parts","sidebar":"tutorialSidebar"},"ImSwitch/DahengCamera":{"id":"ImSwitch/DahengCamera","title":"Install driver for Daheng Camera","description":"Windows","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchClient":{"id":"ImSwitch/ImSwitchClient","title":"ImSwitchClient Documentation","description":"ImSwitchClient is a Python package designed to connect to the ImSwitch REST API, enabling remote control of ImSwitchUC2 functionalities directly from Jupyter Notebooks. This client facilitates easy integration with the ImSwitch ecosystem, offering programmable access to various features like laser control, stage manipulation, and image acquisition.","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchConfig":{"id":"ImSwitch/ImSwitchConfig","title":"ImSwitchConfig","description":"ImSwitch Config File","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchInstall":{"id":"ImSwitch/ImSwitchInstall","title":"Install ImSwitch","description":"What will you learn?","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchInstaller":{"id":"ImSwitch/ImSwitchInstaller","title":"Install ImSwitch using the ImSwitch Installer","description":"We created a customized conda installer for the ImSwitchUC2 package that is based on the open-source conda constructor project. 
All files to construct the package installer can be found in this repository https://github.com/beniroquai/ImSwitchInstaller","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchInstallUbuntu":{"id":"ImSwitch/ImSwitchInstallUbuntu","title":"ImSwitchInstallUbuntu","description":"ImSwitch Installation Ubuntu","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchInstallWindows":{"id":"ImSwitch/ImSwitchInstallWindows","title":"ImSwitchInstallWindows","description":"ImSwitch Installation on Windows","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchUpdate":{"id":"ImSwitch/ImSwitchUpdate","title":"ImSwitchUpdate","description":"Updated openUC2 ImSwitch","sidebar":"tutorialSidebar"},"intro":{"id":"intro","title":"openUC2 Documentation","description":"Here you can find all information to enhance, repair, improve, use, communicate,.... our optical toolbox openUC2. Did not find what you were looking for? No problem. Send us a mail or write an issue in our github repository https://github.com/openUC2/UC2-GIT/issues.","sidebar":"tutorialSidebar"},"Investigator/Lightsheet/LightSheet":{"id":"Investigator/Lightsheet/LightSheet","title":"openUC2 Light-Sheet Microscope","description":"In this experiment, we will explore the concept of optical sectioning to improve the resolution along the optical axis and the XY plane. The Light-Sheet Microscope, also known as the Light-Sheet Microscopy or Lattice Light-Sheet Microscopy, is a powerful technique used to acquire volumetric images of samples, such as zebrafishes. This technique enables us to visualize biological specimens in three dimensions with high resolution and minimal phototoxicity.","sidebar":"tutorialSidebar"},"Investigator/Lightsheet/LightSheet Sample":{"id":"Investigator/Lightsheet/LightSheet Sample","title":"openUC2 Light-Sheet Tips and Tricks","description":"Introduction to the openUC2 Light-Sheet Microscope","sidebar":"tutorialSidebar"},"Investigator/Lightsheet/LightsheetCalibration":{"id":"Investigator/Lightsheet/LightsheetCalibration","title":"Light-sheet alignment","description":"This tutorial will show you how to find the light-sheet and align this w.r.t. the camera plane.","sidebar":"tutorialSidebar"},"Investigator/Lightsheet/LightSheetOld":{"id":"Investigator/Lightsheet/LightSheetOld","title":"openUC2 Light-Sheet Microscope (Old Version)","description":"This is the manual for the Light sheet Microscope.","sidebar":"tutorialSidebar"},"Investigator/STORM/Electronics":{"id":"Investigator/STORM/Electronics","title":"Electronics","description":"Here we make use of the ESP32 Wemos D1 R32 microcontroller board in combination with the CNC Shield v3. The wiring of the different components is straight forward as the Stepper Motors are attached to the stepper drivers and the Laser is triggered by the SpinEn pin. The NeoPixel LED mounts to the Hold pin.","sidebar":"tutorialSidebar"},"Investigator/STORM/Illumination":{"id":"Investigator/STORM/Illumination","title":"Setting up the laser","description":"Laser illumination","sidebar":"tutorialSidebar"},"Investigator/STORM/Main":{"id":"Investigator/STORM/Main","title":"U.C.*STORM*","description":"---","sidebar":"tutorialSidebar"},"Investigator/STORM/Results":{"id":"Investigator/STORM/Results","title":"Results","description":"Imaging with the UC2-STORM setup","sidebar":"tutorialSidebar"},"Investigator/STORM/Software":{"id":"Investigator/STORM/Software","title":"Software","description":"For the control and acquisition software, we use ImSwitch. 
This is an open-source software centered around Napari as a multi-layer viewer and a rich framework for QT-based widgets. We make use of the open-source localization framework \\"microEye\\" ()","sidebar":"tutorialSidebar"},"Investigator/STORM/Stability":{"id":"Investigator/STORM/Stability","title":"Stability","description":"Setup stability","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/AlignLaser":{"id":"Investigator/XYZMicroscope/AlignLaser","title":"Aligning the Beamsplitter Cube","description":"The new xyz microscope has a special 2x1 cube that holds the fluorescence optics. Inside the beamsplitter cube is mounted kinematically and can be adjusted with 3 set screws. It\'s important that the fiber coupled laser is focussed / reimaged in the back focal plane of the objective lens. Therefore, we have created a little tutorial to get you starting how this works.","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/FiveD_v1":{"id":"Investigator/XYZMicroscope/FiveD_v1","title":"openUC2 FiveD v1","description":"Unpacking the microscope","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/FiveD_v2":{"id":"Investigator/XYZMicroscope/FiveD_v2","title":"openUC2 FiveD v2","description":"Design Files","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/FiveD_v3":{"id":"Investigator/XYZMicroscope/FiveD_v3","title":"openUC2 FiveD v3","description":"Design Files","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/HistoScan":{"id":"Investigator/XYZMicroscope/HistoScan","title":"Histo Scanner Plugin Documentation","description":"Welcome to the documentation page for the Histo Scanner Plugin, a powerful tool for scanning large areas and stitching images onto a large canvas. This page provides detailed information on how to configure and use the plugin effectively.","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/MCTPlugin":{"id":"Investigator/XYZMicroscope/MCTPlugin","title":"MCT (Multi-Colour Timelapse) Imaging Plugin","description":"More information are coming soon","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/ROIScanner":{"id":"Investigator/XYZMicroscope/ROIScanner","title":"ROI Scanner","description":"Starting ImSwitch on Ubuntu and Start the ROI Scanner","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/SetupPhasecontrast":{"id":"Investigator/XYZMicroscope/SetupPhasecontrast","title":"openUC2 Phase-Contrast Setup Tutorial","description":"Introduction: Understanding Phase-Contrast Microscopy","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/SetupTubelens":{"id":"Investigator/XYZMicroscope/SetupTubelens","title":"openUC2 Setting up the tube lens","description":" - - + + \ No newline at end of file diff --git a/docs/Electronics/uc2e7/index.html b/docs/Electronics/uc2e7/index.html index 8fa2a60fa..c72bb0e26 100644 --- a/docs/Electronics/uc2e7/index.html +++ b/docs/Electronics/uc2e7/index.html @@ -9,15 +9,15 @@ - - + +

Controlling the UC2e

Controlling the ESP32

The unified "REST-API" (inspired by REST, not implementing the full protocol) lets you control the functionality from multiple different clients (e.g. Python, web browser, Android phone). The core idea is to file POST/GET requests (serial/WiFi) that send/receive JSON documents which make the device "do something".
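As a minimal sketch of this idea in Python (assuming the USB serial driver is installed and the board enumerates as a serial port; the port, baud rate and payload fields below are illustrative assumptions only — the exact JSON schema for each endpoint, e.g. /motor_act, is described in the API section):

# Minimal sketch: push one JSON command to the UC2-ESP over USB serial.
# Port, baud rate and payload fields are assumptions -- check the API
# description of the endpoint (here /motor_act) for the exact schema.
import json
import serial  # pyserial

PORT = "COM3"  # e.g. /dev/ttyUSB0 on Linux

command = {
    "task": "/motor_act",  # endpoint name
    "motor": {"steppers": [{"stepperid": 1, "position": 1000, "speed": 5000}]},
}

with serial.Serial(PORT, baudrate=115200, timeout=1) as ser:
    ser.write((json.dumps(command) + "\n").encode())  # send the request
    print(ser.readline().decode(errors="ignore"))     # print the JSON reply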

Installing the USB serial driver: how to install the CH340 USB serial driver is explained in more detail here: Sparkfun

🐍 Python Bindings

To interact with the electronics, we implemented a Python library called UC2-REST (available here) that helps you work with the device. The easiest way to install it is:

pip install uc2-rest

It automatically detects your UC2e (if the driver is installed), connects to it and offers the basic functionality such as moving the motors.
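A rough usage sketch is shown below; the module, class and method names are assumptions based on typical UC2-REST examples, so follow the Jupyter tutorial linked below for the exact, current API:

# Rough sketch only -- class and method names are assumptions; the
# UC2-REST Jupyter tutorial is the authoritative reference.
import uc2rest

esp32 = uc2rest.UC2Client(serialport="COM3")  # or let it auto-detect the port
esp32.motor.move_x(steps=1000, speed=1000)    # move the X motor by 1000 steps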

To give you a deep dive into what's possible, we provide a Jupyter Notebook that guides you through all the functionality. You can find it here: Start Jupyter Tutorial

📲 Android APP

This is coming soon. You will be able to control the electronics using the Wifi connection of your Android phone.

💻 Browser APP

If the ESP32 is offering an access point or is connected to your WiFi router, you can access the web server running on the ESP32 using a browser. It offers limited control over the endpoints by filing POST and GET requests.
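A sketch of what such a request could look like from Python (under the assumptions that the HTTP routes mirror the serial endpoints, e.g. /motor_act, and that the board is reachable at the usual ESP32 access-point address 192.168.4.1 — verify both against your firmware version):

# Sketch under assumptions: the HTTP route and device IP are not guaranteed
# by this documentation -- adapt them to your firmware and network setup.
import requests

payload = {"task": "/motor_act",
           "motor": {"steppers": [{"stepperid": 1, "position": 1000, "speed": 5000}]}}
response = requests.post("http://192.168.4.1/motor_act", json=payload, timeout=5)
print(response.status_code, response.text)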

More information is coming soon!

🎮 PlayStation 3 or PlayStation 4 Controller (coming soon)

With the open-source libraries PS3Controller and PS4Controller, we can make use of the Bluetooth-capable joysticks from your beloved game console.

When a PS4 controller is 'paired' to a PS4 console, it just means that it has stored the console's Bluetooth MAC address, which is the only device the controller will connect to. Usually, this pairing happens when you connect the controller to the PS4 console using a USB cable, and press the PS button. This initiates writing the console's MAC address to the controller.

Therefore, if you want to connect your PS4 controller to the ESP32, you either need to figure out what the Bluetooth MAC address of your PS4 console is and set the ESP32's address to it, or change the MAC address stored in the PS4 controller.

Whichever path you choose, you might want a tool to read and/or write the currently paired MAC address from the PS4 controller. You can try using sixaxispairer for this purpose.

If you opted to change the ESP32's MAC address, you'll need to pass that MAC address to the PS4.begin() function within the Arduino setup() function, as shown below, where 1a:2b:3c:01:01:01 is the MAC address (note that the MAC address must be unicast):

void setup()
{
    PS4.begin("1a:2b:3c:01:01:01");
    Serial.println("Ready.");
}

Controlling using ImSwitch

Please have a look here for more information about how to install ImSwitch and here for the UC2-related setup files including the UC2-REST serial interface.

- - + + \ No newline at end of file diff --git a/docs/Electronics/uc2e8/index.html b/docs/Electronics/uc2e8/index.html index 2f1004f60..d592bc96c 100644 --- a/docs/Electronics/uc2e8/index.html +++ b/docs/Electronics/uc2e8/index.html @@ -9,8 +9,8 @@ - - + +
@@ -18,7 +18,7 @@ 4.1. Go to the PlatformIO Home and navigate to Devices 4.2 Copy the device port (if connected) and insert it into the platformio.ini, e.g. upload_port = /dev/cu.SLAB_USBtoUART or COM3 for Windows
  • Hit the PlatformIO upload button; the following task will be run: platformio run --target upload; the code is compiled and saved into ./.pio/build/ 5.1 The code will be uploaded. If everything goes right, the terminal says: `Leaving... Hard resetting via RTS pin...`
  • Open the PlatformIO serial monitor (remember to also change the port in the platformio.ini accordingly) and check the ESP32's output (press the reset button if necessary)
  • In case you have any problems: File an issue :-)
  • To test several commands, you can find a useful list of JSON commands in this file: json_api_BD.txt

    V1: Source-code, Compiling and Binaries (Deprecated)

    The current version of the firmware can be found here: https://github.com/openUC2/UC2-REST/tree/master/ESP32

    Additional information on how to install and compile the board can be found in the README

    Precompiled binaries that can be installed through ImSwitch (more information coming soon) or esptool.py can be found here: https://github.com/openUC2/UC2-REST/tree/master/ESP32/build

    V1: Install necessary software for UC2 rest (flash and interact) (Deprecated)

    Here you learn how to install the software (Arduino IDE, drivers, ESP-IDF, Arduino libraries) that is necessary for the system to work. Everything is explained in the video below.

    Additional information about the UC2 electronics and UC2-REST are provided here: https://github.com/openUC2/UC2-REST

    Download and install the software:

    To simplify life, we host a Dropbox folder containing all the necessary drivers and software packages for this workshop. They will run on a Windows 10 64-bit system:

    List of relevant files

    for the UC2-REST

    • Arduino IDE: arduino-1.8.18-windows.exe
    • ESP32 USB driver: CH341SER.exe
    • UC2 Rest firmware: UC2-REST.zip

    Alternative GitHub links that provide you with the latest version of the software:

    Steps to install the software

    1. Download all relevant files from the Dropbox folder above
    2. Install the Arduino IDE (including all drivers if you are asked during the installation)
    3. Install the CH340 USB Serial driver https://learn.sparkfun.com/tutorials/how-to-install-ch340-drivers/all
    4. Extract BenesArduinoLibraries-master.zip to /User/$USER$/Documents/Arduino/libraries
    5. Open the Arduino IDE and add the ESP32 board configuration. For this you need to add the following URLs to the settings tab: https://dl.espressif.com/dl/package_esp32_index.json, http://arduino.esp8266.com/stable/package_esp8266com_index.json. For additional information please have a look at this tutorial
    6. Once done, open the Board Manager and add ESP32 version 2.0.3
    7. Unzip the folder UC2-REST and open the file /ESP32/main/main.ino
    8. Select the board, the port and hit the compile and upload button
    9. IMPORTANT: when setting up the build and upload, make sure you add this setting for the partition scheme (and potentially all other settings if not already set as default):

    The system accepts different hardware configurations (pins, devices, etc.). All of this is defined in the pindef_XXXX.h. Please have a look in the UC2-REST repository for additional information: https://github.com/openUC2/UC2-REST

    VIDEO Tutorial: Steps to install the software

    - - + + \ No newline at end of file diff --git a/docs/Electronics/uc2e9/index.html b/docs/Electronics/uc2e9/index.html index 0f531ed12..66b5164ed 100644 --- a/docs/Electronics/uc2e9/index.html +++ b/docs/Electronics/uc2e9/index.html @@ -9,13 +9,13 @@ - - + + - - + + \ No newline at end of file diff --git a/docs/ImSwitch/DahengCamera/index.html b/docs/ImSwitch/DahengCamera/index.html index 1fec03dec..7379ce9f2 100644 --- a/docs/ImSwitch/DahengCamera/index.html +++ b/docs/ImSwitch/DahengCamera/index.html @@ -9,14 +9,14 @@ - - + +

    Install driver for Daheng Camera

    Windows

    Have a look here: https://www.get-cameras.com/requestdownload and install the drivers/SDK (newer versions of ImSwitch ship with the drivers).

    Linux

    ARM

    You can use the camera on the Raspberry Pi or Jetson Nano. For this you can do the following steps:

    cd ~
    cd Downloads
    wget https://dahengimaging.com/downloads/Galaxy_Linux-armhf_Gige-U3_32bits-64bits_1.5.2303.9202.zip
    unzip Galaxy_Linux-armhf_Gige-U3_32bits-64bits_1.5.2303.9202.zip
    cd Galaxy_Linux-armhf_Gige-U3_32bits-64bits_1.5.2303.9202
    chmod +x Galaxy_camera.run
    sudo ./Galaxy_camera.run
    # go through the questionnaire
    sudo reboot

    Install Python bindings

    cd ~/Downloads
    wget https://dahengimaging.com/downloads/Galaxy_Linux_Python_2.0.2106.9041.tar.gz
    tar -xvf Galaxy_Linux_Python_2.0.2106.9041.tar.gz
    cd ~/Downloads/Galaxy_Linux_Python_2.0.2106.9041/api
    # conda activate ****ENV
    pip install -e .
    cd ~/Downloads/Galaxy_Linux_Python_2.0.2106.9041/api
    python ~/Downloads/Galaxy_Linux_Python_2.0.2106.9041/sample/GxSingleCamMono/GxSingleCamMono.py

    The result will be:

    /home/uc2/Downloads/Galaxy_Linux_Python_2.0.2106.9041/sample/GxSingleCamMono/GxSingleCamMono.py:19: SyntaxWarning: "is" with a literal. Did you mean "=="?
    if dev_num is 0:

    -------------------------------------------------------------
    Sample to show how to acquire mono image continuously and show acquired image.
    -------------------------------------------------------------

    Initializing......

    Frame ID: 0 Height: 3036 Width: 4024

    Sample Script

# version:1.0.1905.9051
import gxipy as gx
from PIL import Image


def main():
    # print the demo information
    print("")
    print("-------------------------------------------------------------")
    print("Sample to show how to acquire mono image continuously and show acquired image.")
    print("-------------------------------------------------------------")
    print("")
    print("Initializing......")
    print("")

    # create a device manager
    device_manager = gx.DeviceManager()
    dev_num, dev_info_list = device_manager.update_device_list()
    if dev_num is 0:
        print("Number of enumerated devices is 0")
        return

    # open the first device
    cam = device_manager.open_device_by_index(1)

    # exit when the camera is a color camera
    if cam.PixelColorFilter.is_implemented() is True:
        print("This sample does not support color camera.")
        cam.close_device()
        return

    # set continuous acquisition
    cam.TriggerMode.set(gx.GxSwitchEntry.OFF)

    # set exposure
    cam.ExposureTime.set(10000)

    # set gain
    cam.Gain.set(10.0)

    # start data acquisition
    cam.stream_on()

    # acquire image: num is the image number
    num = 1
    for i in range(num):
        # get raw image
        raw_image = cam.data_stream[0].get_image()
        if raw_image is None:
            print("Getting image failed.")
            continue

        # create numpy array with data from raw image
        numpy_image = raw_image.get_numpy_array()
        if numpy_image is None:
            continue

        # print height, width, and frame ID of the acquisition image
        print("Frame ID: %d Height: %d Width: %d"
              % (raw_image.get_frame_id(), raw_image.get_height(), raw_image.get_width()))

    # stop data acquisition
    cam.stream_off()

    # close device
    cam.close_device()


if __name__ == "__main__":
    main()
    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchClient/index.html b/docs/ImSwitch/ImSwitchClient/index.html index ca5da89ce..db11b8cc0 100644 --- a/docs/ImSwitch/ImSwitchClient/index.html +++ b/docs/ImSwitch/ImSwitchClient/index.html @@ -9,13 +9,13 @@ - - + +

    ImSwitchClient Documentation

    ImSwitchClient is a Python package designed to connect to the ImSwitch REST API, enabling remote control of ImSwitchUC2 functionalities directly from Jupyter Notebooks. This client facilitates easy integration with the ImSwitch ecosystem, offering programmable access to various features like laser control, stage manipulation, and image acquisition.

    PyPI Version

    Features

    Installation

    To install ImSwitchClient, use the following pip command:

    pip install imswitchclient

    Quick Start Example

    This example demonstrates basic usage of ImSwitchClient for moving a positioner and acquiring an image.

    import imswitchclient.ImSwitchClient as imc
    import numpy as np
    import matplotlib.pyplot as plt
    import time

    # Initialize the client
    client = imc.ImSwitchClient()

    # Retrieve the first positioner's name and current position
    positioner_names = client.positionersManager.getAllDeviceNames()
    positioner_name = positioner_names[0]
    currentPositions = client.positionersManager.getPositionerPositions()[positioner_name]
    initialPosition = (currentPositions["X"], currentPositions["Y"])

    # Define and move to a new position
    newPosition = (initialPosition[0] + 10, initialPosition[1] + 10)
    client.positionersManager.movePositioner(positioner_name, "X", newPosition[0], is_absolute=True, is_blocking=True)
    client.positionersManager.movePositioner(positioner_name, "Y", newPosition[1], is_absolute=True, is_blocking=True)

    # Acquire and display an image
    time.sleep(0.5) # Allow time for the move
    lastFrame = client.recordingManager.snapNumpyToFastAPI()
    plt.imshow(lastFrame)
    plt.show()

    # Return the positioner to its initial position
    client.positionersManager.movePositioner(positioner_name, "X", initialPosition[0], is_absolute=True, is_blocking=True)
    client.positionersManager.movePositioner(positioner_name, "Y", initialPosition[1], is_absolute=True, is_blocking=True)

    Contributing

    Contributions to ImSwitchClient are welcome! Please refer to the project's GitHub repository for contribution guidelines: https://github.com/openUC2/imswitchclient/.

    License

    ImSwitchClient is licensed under the MIT License. For more details, see the LICENSE file in the project repository.

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchConfig/index.html b/docs/ImSwitch/ImSwitchConfig/index.html index d272a986c..82ea6019d 100644 --- a/docs/ImSwitch/ImSwitchConfig/index.html +++ b/docs/ImSwitch/ImSwitchConfig/index.html @@ -9,13 +9,13 @@ - - + +

    ImSwitchConfig

    ImSwitch Config File

    This is a sample uc2_hik_histo.json configuration file:

    {
    "positioners": {
    "ESP32Stage": {
    "managerName": "ESP32StageManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "isEnable": true,
    "enableauto": false,
    "stepsizeX": -0.3125,
    "stepsizeY": -0.3125,
    "stepsizeZ": 0.3125,
    "homeSpeedX": 15000,
    "homeSpeedY": 15000,
    "homeSpeedZ": 15000,
    "isDualaxis": true,
    "homeDirectionX": 1,
    "backlashXOld": 15,
    "backlashYOld": 40,
    "backlashX": 0,
    "backlashY": 0,
    "homeEndstoppolarityY": 0,
    "homeDirectionY": -1,
    "homeDirectionZ": 0,
    "homeXenabled": 1,
    "homeYenabled": 1,
    "homeZenabled": 0,
    "initialSpeed": {
    "X": 15000,
    "Y": 15000,
    "Z": 15000
    }
    },
    "axes": [
    "X",
    "Y",
    "Z"
    ],
    "forScanning": true,
    "forPositioning": true
    }
    },
    "rs232devices": {
    "ESP32": {
    "managerName": "ESP32Manager",
    "managerProperties": {
    "host_": "192.168.43.129",
    "serialport": "COM3"
    }
    }
    },
    "lasers": {
    "LED": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": 1
    },
    "wavelength": 635,
    "valueRangeMin": 0,
    "valueRangeMax": 1023
    }
    },
    "detectors": {
    "WidefieldCamera": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "HikCamManager",
    "managerProperties": {
    "isRGB": 1,
    "cameraListIndex": 0,
    "cameraEffPixelsize": 0.2257,
    "hikcam": {
    "exposure": 0,
    "gain": 0,
    "blacklevel": 100,
    "image_width": 1000,
    "image_height": 1000
    }
    },
    "forAcquisition": true,
    "forFocusLock": true
    },
    "Observer": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "OpenCVCamManager",
    "managerProperties": {
    "cameraListIndex": 1,
    "cameraListIndexWIN": 0,
    "isRGB":1,
    "opencvcam": {
    "exposure": 10
    }
    },
    "forAcquisition": true
    }
    },
    "autofocus": {
    "camera": "WidefieldCamera",
    "positioner": "ESP32Stage",
    "updateFreq": 10,
    "frameCropx": 780,
    "frameCropy": 400,
    "frameCropw": 500,
    "frameCroph": 100
    },
    "mct": {
    "monitorIdx": 2,
    "width": 1080,
    "height": 1920,
    "wavelength": 0,
    "pixelSize": 0,
    "angleMount": 0,
    "patternsDirWin": "C:\\Users\\wanghaoran\\Documents\\ImSwitchConfig\\imcontrol_slm\\488\\",
    "patternsDir": "/users/bene/ImSwitchConfig/imcontrol_sim/488"
    },
    "dpc": {
    "wavelength": 0.53,
    "pixelsize": 0.2,
    "NA": 0.3,
    "NAi": 0.3,
    "n": 1.0,
    "rotations": [
    0,
    180,
    90,
    270
    ]
    },
    "webrtc": {},
    "PixelCalibration": {},
    "focusLock": {
    "camera": "WidefieldCamera",
    "positioner": "ESP32StageManager",
    "updateFreq": 4,
    "frameCropx": 0,
    "frameCropy": 0,
    "frameCropw": 0,
    "frameCroph": 0
    },
    "LEDMatrixs": {
    "ESP32 LEDMatrix": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDMatrixManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "Nx": 4,
    "Ny": 4,
    "wavelength": 488,
    "valueRangeMin": 0,
    "valueRangeMax": 32768
    }
    }
    },
    "availableWidgets": [
    "Settings",
    "View",
    "Recording",
    "Image",
    "Laser",
    "Positioner",
    "Autofocus",
    "MCT",
    "UC2Config",
    "ImSwitchServer",
    "PixelCalibration",
    "HistoScan",
    "LEDMatrix",
    "Joystick",
    "Flatfield",
    "ROIScan"
    ],
    "nonAvailableWidgets": [
    "STORMRecon",
    "DPC",
    "Hypha",
    "FocusLock",
    "HistoScan",
    "FocusLock",
    "FOVLock"
    ]
    }

    Configuration File Documentation

    Overview

    This configuration file is designed to manage settings and properties of various components in a complex system, such as positioners, RS232 devices, lasers, detectors, autofocus settings, etc. It is structured in JSON format for ease of reading and editing.

    Sections

    1. Positioners

      • ESP32Stage
        • managerName: Specifies the manager responsible for handling this positioner, in this case, ESP32StageManager.
        • managerProperties: Contains detailed settings for the positioner, such as RS232 device identification, step sizes for different axes, home speeds, axis enable/disable settings, and other mechanical properties.
        • axes: Lists the axes controlled by this positioner (X, Y, Z).
        • forScanning & forPositioning: Boolean flags to indicate if the positioner is used for scanning and/or positioning.
    2. RS232 Devices

      • ESP32
        • managerName: The manager handling RS232 devices, here ESP32Manager.
        • managerProperties: Network and port settings for the RS232 device.
    3. Lasers

      • LED
        • Details for managing LED laser settings, including the manager name (ESP32LEDLaserManager), RS232 device reference, channel index, wavelength, and value range.
    4. Detectors

      • WidefieldCamera & Observer
        • Configuration for different camera detectors, including manager names (HikCamManager, OpenCVCamManager), properties like RGB support, camera indexes, pixel size, and acquisition settings.
    5. Autofocus

      • Configuration for autofocus feature, linking a camera with a positioner and setting parameters like update frequency and frame cropping dimensions.
    6. MCT (Multi-Colour Timelapse)

      • Settings for monitor index, dimensions, wavelength, pixel size, angle mount, and directories for pattern files.
    7. DPC (Differential Phase Contrast)

      • Settings related to DPC imaging, including wavelength, pixel size, numerical aperture, refractive index, and rotation angles.
    8. WebRTC

      • An empty section possibly reserved for WebRTC configuration.
    9. Pixel Calibration

      • An empty section likely intended for pixel calibration settings.
    10. Focus Lock

      • Focus lock settings similar to autofocus but with its distinct configuration.
    11. LED Matrixes

      • ESP32 LEDMatrix
        • Configuration for LED matrixes, specifying manager details, RS232 device, dimensions, wavelength, and value range.
    12. Available Widgets

      • A list of widgets that are available in the system, indicating the features or components that can be controlled or monitored.
    13. Non-Available Widgets

      • A list of widgets that are not available, possibly indicating features not supported or deactivated in the current setup.

    Conclusion

    This configuration file is a comprehensive document that outlines the settings and parameters for various hardware and software components in a specialized system. It is critical for ensuring the correct operation of the equipment it is designed to control.
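    For a quick sanity check you can also inspect the file programmatically; below is a minimal Python sketch (the path is an assumption — point it at wherever your ImSwitchConfig folder actually lives):

    # Minimal sketch: load the setup JSON and list a few of its sections.
    # The path is an assumption -- adjust it to your ImSwitchConfig location.
    import json
    from pathlib import Path

    config_path = Path.home() / "Documents" / "ImSwitchConfig" / "imcontrol_setups" / "uc2_hik_histo.json"
    with open(config_path) as f:
        config = json.load(f)

    print("Detectors:", list(config["detectors"].keys()))
    print("Available widgets:", config["availableWidgets"])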

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchInstall/index.html b/docs/ImSwitch/ImSwitchInstall/index.html index c43cdc839..dfd60ddb2 100644 --- a/docs/ImSwitch/ImSwitchInstall/index.html +++ b/docs/ImSwitch/ImSwitchInstall/index.html @@ -9,13 +9,13 @@ - - + +

    Install ImSwitch

    What will you learn?

    • How to install the main GUI software to control the Optics components
    • How to install the drivers

    Download the software

    Duration: 1

    Please go to our ImSwitch Repository and download the latest Build-artefact:

    GitHub Actions -> "bundle"

    Extract the ZIP folder

    Duration: 2

    Right click on the downloaded ZIP folder and select "extract all". This may take a while.

    Download and Install the drivers for the Camera/UC2 Electronics board

    Electronics Board (CH340)

    For the CH340 driver, please follow these instructions

    The driver is available here

    Download the Windows CH340 Driver
    Unzip the file
    Run the installer which you unzipped
    In the Device Manager when the CH340 is connected you will see a COM Port in the Tools > Serial Port menu, the COM number for your device may vary depending on your system.

    Electronics Board (CP210x)

    The driver for the CP210x is available here

    Download the Windows CP210x Driver
    Unzip the file
    Run the installer which you unzipped
    In the Device Manager when the CP210x is connected you will see a COM Port in the Tools > Serial Port menu, the COM number for your device may vary depending on your system.

    Daheng Imaging Cameras

    Download the Windows SDK USB2+USB3+GigE (including Directshow + Python) Galaxy V1.18.2208.9301 for the Daheng USB3 Cameras from the Get-Cameras Website

    Install the packages on your computer.

    Duration: 2

    Prepare ImSwitch

    1. Connect the Camera to your computer using the USB3 cable (optionally test its proper functioning using the Daheng Galaxy Viewer (x64))
    2. Connect the UC2 electronics with the computer (USB micro) and hook up the 12V power supply with the power connection
    3. Check if you can see the USB COM Port in your device manager:

    Duration: 2

    Install the UC2 ImSwitch Configurations

    In order to use the UC2 Hardware and the Daheng USB Camera, you need the UC2 config files. Please go to https://github.com/openUC2/ImSwitchConfig/tree/stable and download the Repository as a zip file following this link.

    Once it has been downloaded, unzip it to C:\Users\YOURUSERNAME\Documents\ImSwitchConfig

    It should look like this:

    Duration: 2

    Start ImSwitch

    1. Open a Windows Terminal by typing WIN+R, then type CMD and hit enter.

    1. In the Windows Terminal, navigate to the folder where you downloaded the software - e.g. cd C:\Users\UC2\Downloads\imswitch-windows-latest\ImSwitch and hit enter
    2. Before starting the executable, we need to run one command in advance: set SETUPTOOLS_USE_DISTUTILS=stdlib
    3. Type ImSwitch.exe and hit enter, the executable will open the ImSwitch GUI

    If everything has been configured correctly, the GUI should open and look like this. Additional information on its functionality can be found in the Read-The-Docs: https://imswitch.readthedocs.io/en/stable/

    If you have any additional questions or issues, please post them in the ISSUE section here.

    Explanatory Video on how to get started with ImSwitch

    Duration: 3

    https://www.youtube.com/watch?v=Om6GWZZ_0So

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchInstallUbuntu/index.html b/docs/ImSwitch/ImSwitchInstallUbuntu/index.html index 30557270a..0ea9a37ba 100644 --- a/docs/ImSwitch/ImSwitchInstallUbuntu/index.html +++ b/docs/ImSwitch/ImSwitchInstallUbuntu/index.html @@ -9,13 +9,13 @@ - - + +

    ImSwitchInstallUbuntu

    ImSwitch Installation Ubuntu

    Step 1: Install Visual Studio Code (VS Code)

    1. Open a web browser and navigate to the VS Code download page.
    2. Download the Debian package for your 64-bit system.
    3. Once downloaded, open a terminal window and navigate to the directory where the .deb file is located.
    4. Run the following command to install VS Code:
      sudo dpkg -i <filename>.deb
      sudo apt-get install -f

    Step 2: Install Miniconda

    1. Open a terminal window and run the following command to download Miniconda:
      wget https://repo.anaconda.com/miniconda/Miniconda3-py310_23.5.2-0-Linux-x86_64.sh
    2. Make the script executable and run it:
      bash Miniconda3-py310_23.5.2-0-Linux-x86_64.sh
    3. Follow the on-screen instructions to complete the installation.
    4. Create a new environment named imswitch with Python 3.10:
      conda create -n imswitch python=3.10 -y

    Step 3: Clone Necessary Repositories

    1. Navigate to the Downloads directory:
      cd ~/Downloads
    2. Clone the required repositories:
      git clone https://github.com/openUC2/UC2-REST
      git clone https://github.com/openUC2/ImSwitch
      git clone https://gitlab.com/bionanoimaging/nanoimagingpack
    1. Activate the imswitch environment:
      conda activate imswitch
    2. Navigate to the ImSwitch directory and install it:
      cd ~/Downloads/ImSwitch
      pip install -e .
    3. Repeat for UC2-REST and nanoimagingpack:
      cd ~/Downloads/UC2-REST
      pip install -e .
      cd ~/Downloads/nanoimagingpack # Correcting typo from original logs
      pip install -e .

    Step 5: Install Camera Drivers

    1. Clone the camera drivers:
      cd ~/Downloads
      git clone https://github.com/hongquanli/octopi-research/
    2. Navigate to the camera drivers directory and run the installation script:
      cd octopi-research/software/drivers\ and\ libraries/daheng\ camera/Galaxy_Linux-x86_Gige-U3_32bits-64bits_1.2.1911.9122/
      ./Galaxy_camera.run

    Step 6: Clone ImSwitch Configuration and Set Permissions

    1. Navigate to the Documents directory:
      cd ~/Documents
    2. Clone the ImSwitch configuration:
      git clone https://github.com/openUC2/ImSwitchConfig
    3. Change the ownership of the device:
      sudo chown pi:pi /dev/ttyUSB0

    Congratulations! You have successfully installed ImSwitch and related dependencies.

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchInstallWindows/index.html b/docs/ImSwitch/ImSwitchInstallWindows/index.html index 0df8e772a..1130b57dd 100644 --- a/docs/ImSwitch/ImSwitchInstallWindows/index.html +++ b/docs/ImSwitch/ImSwitchInstallWindows/index.html @@ -9,14 +9,14 @@ - - + +

    ImSwitchInstallWindows

    ImSwitch Installation on Windows

    Step 1: Install Visual Studio Code (VS Code)

    1. Open a web browser and go to the VS Code download page.
    2. Download the Windows Installer.
    3. Once the download is complete, locate the installer and double-click to run it.
    4. Follow the on-screen instructions to complete the installation.

    Step 2: Install Miniconda

    1. Open a web browser and navigate to the Miniconda download page.
    2. Download the Windows installer for the 64-bit version of Miniconda.
    3. Run the installer by double-clicking the downloaded file.
    4. Follow the installer prompts to install Miniconda to a directory of your choice (e.g., C:\Miniconda3).
    5. During installation, ensure that the option to "Add Anaconda to my PATH environment variable" is selected.

    Step 3: Clone Necessary Repositories

    1. Ensure you have installed Git for Windows
    2. Open the Command Prompt:
      • Press Win + R, type cmd, and press Enter.
    3. Navigate to your preferred directory where you want to clone the repositories (e.g., C:\Users\<YourUsername>\Downloads):
      cd C:\Users\<YourUsername>\Downloads
    4. Clone the required repositories:
      git clone https://github.com/openUC2/UC2-REST
      git clone https://github.com/openUC2/ImSwitch
      git clone https://gitlab.com/bionanoimaging/nanoimagingpack
    1. Open the Command Prompt.
    2. Create a new Conda environment named imswitch with Python 3.10:
      conda create -n imswitch python=3.10
    3. Activate the imswitch environment:
      conda activate imswitch
    4. Navigate to the ImSwitch directory and install it (the -e flag installs the package in editable mode):
      cd C:\Users\<YourUsername>\Downloads\ImSwitch
      pip install -e .
    5. Repeat for UC2-REST and nanoimagingpack:
      cd C:\Users\<YourUsername>\Downloads\UC2-REST
      pip install -e .
      cd C:\Users\<YourUsername>\Downloads\nanoimagingpack
      pip install -e .

    Step 5: Install Camera Drivers for Daheng Cameras

    1. Download the Galaxy camera SDK for Windows here https://www.get-cameras.com/requestdownload
    2. Double-click on the installation executable file (Galaxy_camera.exe) to run it.
    3. Follow the on-screen instructions to complete the installation.

    Step 6: Clone ImSwitch Configuration and Set Permissions

    1. Navigate to the Documents directory using the Command Prompt:
      cd C:\Users\<YourUsername>\Documents
    2. Clone the ImSwitch configuration:
      git clone https://github.com/openUC2/ImSwitchConfig

    Step 7: Start

    conda activate imswitch
    imswitch

    Problems and Solutions

    Very likely, there will be a PyQt issue.

    Try:

    pip install PyQt5 --force-reinstall

    Congratulations! You have successfully installed ImSwitch and related dependencies on Windows.

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchInstaller/index.html b/docs/ImSwitch/ImSwitchInstaller/index.html index 53270ccb3..911ae1e67 100644 --- a/docs/ImSwitch/ImSwitchInstaller/index.html +++ b/docs/ImSwitch/ImSwitchInstaller/index.html @@ -9,8 +9,8 @@ - - + +
    @@ -19,7 +19,7 @@

  • Choose a path to install the conda environment to (Hint: the path should not exceed 40 characters)

    1. Proceed

    1. Wait until the packages are installed

    1. A command prompt will appear and ask you for granting admin rights; Here a python script tries to download and install the silabs USB UART drivers for the ESP32

    2. The installer will tell you whether the installation process was successful

    1. In the next steps, all packages necessary for ImSwitch will be downloaded and installed into the environment

    1. The installer informs you once it's done

    1. Once everything has been installed, the installer tells you it's done

    1. Exit the installer by hitting finish

    1. On the desktop a new icon has been created to start the ImSwitch software. Double-click it and wait until the window shows up

    Troubleshooting

    The conda installer installs your environment in the location that you selected previously. To find out, you can open a command line window by hitting WIN+R, typing "cmd" and pressing enter. Then enter

    conda env list

    The name imswitchopenuc2 should appear. You can activate this Python environment by typing

    conda activate imswitchopenuc2

    If this works successfully, you can start imswitch by typing

    imswitch

    Disclaimer

    This is still at a very early stage and may have errors. Expect errors! Feel free to file any issues in our repository or write us a mail. :)

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchUpdate/index.html b/docs/ImSwitch/ImSwitchUpdate/index.html index c42fbfb81..64611150f 100644 --- a/docs/ImSwitch/ImSwitchUpdate/index.html +++ b/docs/ImSwitch/ImSwitchUpdate/index.html @@ -9,13 +9,13 @@ - - + +

    ImSwitchUpdate

    Updated openUC2 ImSwitch

    In this guide, we'll walk you through the process of updating ImSwitch after you've installed it using pip. The update consists of three main steps:

    1. Updating the ImSwitch UC2 version
    2. Updating the UC2-REST
    3. Updating the UC2-ESP32 firmware

    1. Updating the ImSwitch UC2 Version

    Assumption: You have previously cloned the ImSwitch repository using git.

    1. Open your terminal.

    2. Activate the ImSwitch environment:

      conda activate imswitch
    3. Navigate to the directory where you cloned ImSwitch:

      cd <DIRECTORY/WHERE/YOU/DOWNLOADED/IMSWITCH>
    4. Pull the latest version from the repository and install:

      git pull https://github.com/openUC2/ImSwitch/
      pip install -e .

    2. Updating the UC2-REST to Interface the UC2 Electronics

    Assumption: You have previously cloned the UC2-REST repository using git.

    1. In the terminal, navigate to the directory where you cloned UC2-REST:

      cd <DIRECTORY/WHERE/YOU/DOWNLOADED/UC2-REST>
    2. Pull the latest version from the repository and install:

      git pull https://github.com/openUC2/UC2-REST/
      pip install -e .

    3. Updating the UC2-ESP32 Firmware

    1. Visit the UC2 Firmware Page.
    2. Select the board you're using. If you're uncertain about this, feel free to reach out via email.
    3. Click on the "Connect" button.
    4. From the browser-provided list, select the COM port.
    5. Click on "Flash Firmware".
    6. Wait for the installation process to complete.
    7. Test the firmware on the UC2 Web Serial Test Page.
    8. Close the browser window to release the serial port.

    Finally, you can start ImSwitch:

    python -m imswitch
    - - + + \ No newline at end of file diff --git a/docs/Investigator/Lightsheet/LightSheet Sample/index.html b/docs/Investigator/Lightsheet/LightSheet Sample/index.html index a9eff41eb..60f87de52 100644 --- a/docs/Investigator/Lightsheet/LightSheet Sample/index.html +++ b/docs/Investigator/Lightsheet/LightSheet Sample/index.html @@ -9,8 +9,8 @@ - - + +
    @@ -31,7 +31,7 @@ The openUC2 light-sheet microscope features motorized axes for enhanced control and precision:
    • The Z-axis adjusts the objective lens focus relative to the light-sheet plane.
    • The X axis moves the sample in the vertical direction with respect to the ground surface.
    • The Y axis shifts the sample parallel to the light-sheet plane.
    • The A axis moves the sample along the light-sheet plane, towards or away from the objective lens.
    • Each step corresponds to approximately 300nm in physical units, enabling fine-tuned movement and positioning (see the conversion sketch below).
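    As a worked example of that conversion (assuming the quoted ~300 nm per step; the real calibration depends on your stage, lead screw and microstepping settings):

    # Worked example: convert a desired physical travel into motor steps,
    # assuming ~300 nm per step as quoted above (an approximation only).
    NM_PER_STEP = 300

    def um_to_steps(distance_um: float) -> int:
        """Convert a distance in micrometres into approximate motor steps."""
        return round(distance_um * 1000 / NM_PER_STEP)

    print(um_to_steps(30))    # ~100 steps for 30 um
    print(um_to_steps(1000))  # ~3333 steps for 1 mm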

    We appreciate your engagement with the openUC2 light-sheet microscope and hope that these technical details enhance your understanding of the setup and its capabilities. Should you have any inquiries or require further assistance, please don't hesitate to reach out.

    Protocol to align the light-sheet w.r.t. the focus plane

    Alignment Protocol for Light-Sheet Microscope Focus Plane

    Efficient alignment of the light-sheet with the microscope objective lens's focus plane is crucial for optimal imaging results. This protocol outlines the steps to achieve precise alignment using fluorescent markers and manipulation of the kinematic mirror.

    Alignment Steps:

    1. Fluorescent Marker Setup:

      • Begin by ensuring that the light-sheet is coplanar with the microscope's objective lens field of view.
      • Use a fluorescent pen marker to label the embedding media, effectively visualizing the light-sheet.

    2. Activating the Laser:

    1. Visualizing the Light-Sheet:

      • With the laser activated, you should observe the light-sheet within the water chamber. Refer to the provided image for a reference.
    2. Kinematic Mirror Adjustment:

      • The three screws on the kinematic mirror in the right corner control the orientation of the light-sheet in 3D space.
      • Familiarize yourself with the degrees of freedom associated with these screws.

    1. Fundamental Considerations:
      • The cylindrical lens focuses the primary light-sheet in the backfocal plane of the illumination objective (4x, 0.1 NA).
      • Rotating the objective lens adjusts the orientation of the light-sheet.
      • The square orientation of the cylindrical lens ensures proper alignment with the detection objective lens.
      • The primary light-sheet exits the cylindrical lens at the center.
      • The kinematic mirror manipulates the light-sheet's position in the x and y directions, as well as introducing an offset.
      • Correct mirror alignment is crucial, placing it precisely at the diagonal center of the cube.
      • This central placement ensures that the primary light-sheet enters the objective lens's backfocal plane (BFP) at the center.
      • Such alignment results in the secondary illuminating light-sheet being parallel to the detection lens's focus plane.
      • Observe the effects of rotating the screws and adjust accordingly.

    1. Fluorescent Solution Application:
      • Utilize a syringe for convenient application of the fluorescent solution.

    7. Sample Cube Handling:

    • The sample cube is magnetically held, facilitating easy removal for cleaning.

    • Take care as the sample cube's coverslips are relatively thin and can break.

    Achieving precise alignment between the light-sheet and the objective lens's focus plane is critical for obtaining accurate imaging results. This protocol provides a systematic approach to optimizing your light-sheet microscope setup. For further assistance or questions, feel free to reach out to our community and support channels. Your engagement contributes to the ongoing refinement of the openUC2 light-sheet microscope system.

    Finding the focus (waist) of the light-sheet

    To effectively align the light-sheet in your setup, it's crucial to follow these two key steps:

    Step 1: Centering the Sheet within the Field of View (FOV)

    1. Begin by ensuring that the waist of the light-sheet is positioned at the center of the microscope's field of view (FOV).
    2. To achieve this, the cylindrical lens needs to be temporarily removed. Carefully release the lower puzzle pieces to detach the cylindrical lens cube.

    1. With the cylindrical lens removed, a collimated beam should enter the back focal plane (BFP) of the illuminating objective lens.
    2. Adjust the kinematic mirror to guide the round beam, approximately 10mm in diameter, into the center of the BFP of the illuminating objective lens. This alignment should be parallel to the optical axis.

    Step 2: Achieving Focus with the Detection Objective Lens

    1. Activate the camera (e.g. using the Galaxy Viewer software that comes with the camera drivers) to observe the light-sheet's focus.
    2. The fluorescently labeled region should now exhibit a focused beam, perceptible to the naked eye.
    3. Initiate axial movement of the objective lens (Axis Z) using the online control website. You'll notice an increase in intensity at either the positive or negative direction until the light-sheet focus becomes visible within the field of view.

    1. To optimize focus, make fine adjustments to the kinematic mirror to direct the light-sheet beam if it's positioned too high or too low.
    2. It's common for the light-sheet's focus not to align precisely with the center of the FOV. In this case, carefully adjust the position of the illuminating objective lens along the cube axis to relocate the focus positions.
    3. Once you're content with the alignment, deactivate the laser and reinsert the cylindrical lens.
    4. Notably, this step doesn't need to be repeated each time the light-sheet is activated. The position of the cylindrical lens is relatively stable and doesn't require frequent recalibration.

    Following these steps meticulously will ensure that the light-sheet is accurately aligned both within the FOV's center and in-focus with the detection objective lens. This alignment process is essential for obtaining reliable and high-quality imaging results with the openUC2 light-sheet microscope.
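    If ImSwitch is already running with its REST API enabled, the axial search described in Step 2 can also be scripted. Below is a rough sketch using ImSwitchClient; it assumes the first positioner exposes a "Z" axis and that relative moves are supported — the manager and method names are taken from the ImSwitchClient quick-start example elsewhere in this documentation:

    # Rough sketch: step the Z axis and grab a frame at each position so the
    # brightest/sharpest plane can be identified. Assumes ImSwitch runs with
    # the REST API enabled, a "Z" axis exists and relative moves are allowed.
    import numpy as np
    import imswitchclient.ImSwitchClient as imc

    client = imc.ImSwitchClient()
    positioner = client.positionersManager.getAllDeviceNames()[0]

    scores = []
    for i in range(20):
        # relative 100-step move along Z (roughly 30 um at ~300 nm/step)
        client.positionersManager.movePositioner(positioner, "Z", 100,
                                                 is_absolute=False, is_blocking=True)
        frame = client.recordingManager.snapNumpyToFastAPI()
        scores.append(float(np.mean(frame)))  # crude intensity/focus metric

    print("Brightest plane at iteration:", int(np.argmax(scores)))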

    Once the cylindrical lens is back in, you can readjust the light-sheet w.r.t. the focus plane of the objective lens, since there may be a slight variation after reassembly.

    Brightfield imaging

    In case you want to image the sample in transmission mode, turn on the NeoPixel LED that is connected to the sample cube and optionally remove the fluorescent filter by pulling it up; store it somewhere safe (dust- and scratch-free!).

    Using the Fully Assembled Light-Sheet Microscope for Sample Imaging

    Now that all components are meticulously aligned, the openUC2 light-sheet microscope is primed for sample imaging. Follow these steps to prepare and capture your fluorescent sample:

    1. Sample Preparation:

      • Begin by preparing your fluorescent sample according to the specified protocols.
      • Carefully follow the steps outlined in the dedicated sample preparation section within this document.
    2. Assembling the Sample Holder:

      • Loosen the nut that secures the syringe and insert the syringe into the sample holder.
      • Gradually lower the syringe so that the tip of the sample barely touches the light-sheet within the sample plane.

    1. Squeezing out the Agarose:
      • Squeeze out the agarose gently from the syringe while observing the sample, starting with brightfield imaging.
      • Monitor the camera's image stream to ensure the sample becomes visible within the field of view.

    1. Observing Brightfield Image:
      • If the sample isn't immediately visible, confirm its positioning within the sample cube and make minor adjustments in XYZ to bring it into view on the camera screen.
      • Once visible in brightfield, deactivate the LED light source.

    1. Switching to Laser Illumination:

      • Turn on the laser source, and initially, remove the fluorescent filter.
      • Adjust the imaging settings to enhance contrast and visibility, increasing intensity, exposure time, and/or camera gain until you obtain a clear, well-exposed image with minimal noise.
    2. Fine-tuning Laser Position:

      • Using bright scattering as a guide, locate the laser's position while ensuring you have reinserted the fluorescent filter.
      • Adjust the intensity as needed.
    3. Sample Positioning:

      • Manipulate the sample's position in XYZ space to center it on a region of interest.
    4. ImSwitch Scan and Reconstruction:

      • Utilize ImSwitch software's scan and reconstruction plugin to perform scans of your sample.
      • The specific scan and reconstruction process details are provided in the ImSwitch documentation.

    This completes the procedure for imaging your fluorescent sample using the fully assembled openUC2 light-sheet microscope. With careful preparation and precise adjustments, you can capture high-quality volumetric images that offer valuable insights into the structure and behavior of your sample. Your engagement with the microscope's capabilities contributes to ongoing advancements in microscopic research and exploration.

    ImSwitch data acquisition and Reconstruction

    We assume the system is running and you were able to install ImSwitch on your computer. The configuration JSONfile that describes the light-sheet system can be found further down this document. A tutorial on how to install our ImSwitch Version (SRC: https://github.com/openUC2/ImSwitch/) can be either found in the imSwitch repository or in the ImSwitch section in this wiki.

    Mount the sample on a metal tip

    Glue the sample onto an M5 set screw using super glue or Blu-Tack (not secure, the sample can fall off). Insects offer a great level of fluorescent signal due to autofluorescence and act as nice training samples that can simply hang down using this method.

    Sample preparation à la agarose-in-syringe method

    SRC

    Sample Preparation Protocol for openUC2 Light-Sheet Microscope Imaging: Fluorescently Labeled Zebrafish

    This simplified protocol outlines the steps to prepare a fluorescently labeled zebrafish sample for imaging using the openUC2 light-sheet microscope. This method involves embedding the sample in an agarose cylinder for stable imaging in an aqueous environment. The "aquarium" (water-filled sample chamber) is used for refractive index matching, as the sample would otherwise scatter too much light.

    Materials Required:

    • 1.5% Agar
    • Glass capillary
    • Zebrafish embryo (some other volumetric, mostly clear sample that can be excited at 488nm)
    • Sample medium
    • Falcon tube or small beaker
    • syringe or FEP tube (optional, for increased stability)

    Procedure:

    Mounting in Free-Hanging Agarose Cylinder:

    1. Take the syringe and cut away the tip
    2. Melt 1.5% agar at 70ºC and maintain it at 37ºC.
    3. Insert the plunger into the syringe capillary, ensuring the white end barely protrudes and suck in enough agarose
    4. Gently place the zebrafish embryo into the already solidified agarose, minimizing the water content.
    5. Pull the plunger to draw up about 3cm (1 inch) of melted agarose.
    6. Carefully position the sample close to the capillary's end.
    7. Allow the agarose to set for 1-2 minutes.
    8. When ready to image, gently push the plunger down to extrude the agarose cylinder with the sample, placing it just outside the capillary for imaging.

    Further tweaks for the system

    These steps are not necessary, but help you to customize the microscope to better match your sample configuration.

    Remove the xyz stage from the top

    In case you want to do maintenance on the microscope, the xyz stage can easily be removed by releasing the M3x55mm screws from the bottom part. To do so, remove the puzzle piece that has been mounted below the objective lens and release the 3 screws that mount the stage plate to the upper part of the microscope. You can now release the stage. To put it back on, reverse the process.

    Swap the sample mounting plate

    In principle the XYZ stage can mount any sample geometry. We wanted to start with something and adapted the common syringe mount. Only two screws from below have to be released in order to swap the sample mount plate:

    This part can be customized to adapt e.g. conventional sample slides

    ImSwitch configuration for the light-sheet

    {
    "positioners": {
    "ESP32Stage": {
    "managerName": "ESP32StageManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "enableauto": 0,
    "isEnable": 1
    },
    "axes": [
    "X",
    "Y",
    "Z",
    "A"
    ],
    "forScanning": true,
    "forPositioning": true
    }
    },
    "rs232devices": {
    "ESP32": {
    "managerName": "ESP32Manager",
    "managerProperties": {
    "host_": "192.168.43.129",
    "serialport_": "COM3",
    "serialport": "/dev/cu.usbserial-A50285BI"
    }
    }
    },
    "lasers": {
    "488 Laser": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index":1,
    "filter_change": false,
    "laser_despeckle_period": 10,
    "laser_despeckle_amplitude": 0
    },
    "wavelength": 488,
    "valueRangeMin": 0,
    "valueRangeMax": 1024
    },
    "LED Matrix": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": "LED",
    "filter_change": false,
    "filter_axis": 3,
    "filter_position": 32000,
    "filter_position_init": -0
    },
    "wavelength": 635,
    "valueRangeMin": 0,
    "valueRangeMax": 255
    }
    },
    "detectors": {
    "WidefieldCamera": {
    "ExtPackage": "imswitch_det_webcam",
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "GXPIPYManager",
    "managerProperties": {
    "cameraListIndex": 1,
    "gxipycam": {
    "exposure": 20,
    "gain": 0,
    "blacklevel": 10,
    "image_width": 1000,
    "image_height": 1000
    }
    },
    "forAcquisition": true,
    "forFocusLock": true
    }
    },
    "rois": {
    "Full chip": {
    "x": 600,
    "y": 600,
    "w": 1200,
    "h": 1200
    }
    },
    "LEDMatrixs": {
    "ESP32 LEDMatrix": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDMatrixManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "Nx": 4,
    "Ny": 4
    },
    "wavelength": 488,
    "valueRangeMin": 0,
    "valueRangeMax": 32768
    }
    },
    "autofocus": {
    "camera": "WidefieldCamera",
    "positioner": "ESP32Stage",
    "updateFreq": 10,
    "frameCropx": 780,
    "frameCropy": 400,
    "frameCropw": 500,
    "frameCroph": 100
    },
    "uc2Config": {
    "defaultConfig": "pindefWemos.json",
    "defaultConfig2": "pindefUC2Standalon2.json",
    "defaultConfig1": "pindefUC2Standalon.json"
    },
    "mct": {
    "monitorIdx": 2,
    "width": 1080,
    "height": 1920,
    "wavelength": 0,
    "pixelSize": 0,
    "angleMount": 0,
    "patternsDirWin": "C:\\Users\\wanghaoran\\Documents\\ImSwitchConfig\\imcontrol_slm\\488\\",
    "patternsDir": "/users/bene/ImSwitchConfig/imcontrol_sim/488"
    },
    "dpc": {
    "wavelength": 0.53,
    "pixelsize": 0.2,
    "NA":0.3,
    "NAi": 0.3,
    "n": 1.0,
    "rotations": [0, 180, 90, 270]
    },
    "webrtc":{},
    "PixelCalibration": {},
    "availableWidgets": [
    "Settings",
    "Positioner",
    "View",
    "Recording",
    "Image",
    "Laser",
    "UC2Config",
    "Joystick",
    "Lightsheet",
    "LEDMatrix"
    ],
    "nonAvailableWidgets":[
    "STORMRecon",
    "LEDMatrix",
    "MCT",

    "ImSwitchServer",
    "PixelCalibration",
    "Hypha",
    "FocusLock",
    "HistoScan",

    "FocusLock"]
    }
Z-stage for the objective lens

Almost fully assembled UC2 light-sheet microscope

    Step 2: Light-Sheet Generation and Sample Preparation

    The fiber-coupled laser emits light at a wavelength of 488 nanometers, which is ideal for exciting fluorescent molecules commonly used in biological imaging, such as green fluorescent protein (GFP). The collimated laser beam passes through a cylindrical lens, creating a one-dimensional focus with a width of approximately 10 mm.
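To get a rough feeling for how thin the resulting sheet can become, the Gaussian-focus relation w0 ≈ λ·f / (π·w_in) can be evaluated in a few lines of Python. The focal length below is an assumption for illustration only; it is not specified in this document.

import math

wavelength = 488e-9     # laser wavelength in metres (488 nm, as stated above)
beam_radius_in = 5e-3   # half of the ~10 mm collimated beam width, in metres
focal_length = 50e-3    # ASSUMED focal length of the cylindrical lens (not given here)

# Gaussian beam waist at the focus of the cylindrical lens, i.e. the sheet half-thickness
waist = wavelength * focal_length / (math.pi * beam_radius_in)
print(f"estimated light-sheet waist: {waist * 1e6:.2f} um")   # ~1.55 um for these numbers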

The kinematic mirror allows precise control of the laser beam position, ensuring proper alignment. The lens further shapes the laser beam into an optical sheet, which is then directed into the sample plane by the 4x objective lens.

    The sample, such as a zebrafish embryo, is held in a small aquarium filled with water. The sample is positioned such that the light sheet intersects it, and fluorescence signals are emitted only where the light sheet illuminates.

    Step 3: Image Acquisition

Using the XYZ stage, move the sample through the focal plane of the 10x detection objective lens. The camera captures images as the sample is moved, allowing you to create a three-dimensional stack of the object. The long working distance of the objective lens leaves sufficient space between the lens and the sample, reducing the potential for photodamage and phototoxicity.
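To plan such a stack, it helps to derive the number of planes from the depth range you want to cover and the axial step size. The depth range and sheet thickness below are placeholder values for illustration, not values prescribed by this tutorial.

depth_range_um = 200.0     # ASSUMED total depth to cover, in micrometres
sheet_thickness_um = 3.0   # ASSUMED effective light-sheet thickness at the sample

# Step at roughly half the sheet thickness so that neighbouring planes overlap
step_um = sheet_thickness_um / 2.0
n_planes = int(depth_range_um / step_um) + 1
print(f"step: {step_um} um, planes per stack: {n_planes}")    # 1.5 um steps, 134 planes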

    Benefits of Light-Sheet Microscopy

    Light-sheet microscopy offers several advantages for imaging biological samples:

    • Optical sectioning: The light-sheet illuminates only the focal plane, minimizing background noise and out-of-focus signals.
    • Reduced phototoxicity: With the sample illuminated only in the focal plane, light-sheet microscopy reduces photodamage and photobleaching, allowing long-term imaging of live samples.
    • High-speed imaging: Light-sheet microscopy enables rapid volumetric imaging, capturing dynamic processes in real-time.
    • High resolution: The combination of optical sectioning and minimal scattering allows for high-resolution imaging, revealing fine cellular structures.

    Bill-of-Material

This is a list of components that are used in the latest version of the openUC2 light-sheet microscope. It is subject to change. If you are interested in building one of these devices and need a kit, please don't hesitate to contact us via mail, GitHub or other channels :) Find more information on www.openuc2.com

Category | Amount | Part | Shop | Price (€) | Comment | Quantity | URL/Source | Alternative
External Parts | 1 | Cylindrical lens, comar | Thorlabs | 150 | | 1 | Link |
 | 1 | Camera, monochrome, CMOS | Daheng | 350 | | 1 | Link |
 | 1 | Focusing stage, micrometer, motorized (NEMA12) | China | 100 | | 1 | Haoran |
 | 1 | 10x objective, NA 0.3, long working distance | USA | 250 | | 1 | Link |
 | 1 | XYZ stage, AliExpress, micrometer | China | 250 | | 1 | Link | Link
 | 3 | Motor for stage | China | 80 | | 3 | Link | Link
 | 1 | Tube lens | China | 200 | | 1 | Link |
 | 1 | Fiber laser | China | 200 | | 1 | Haoran | Link
 | 1 | Mirror | PGI | 5 | | 1 | Link |
 | 1 | 4x objective lens, finite | China | 10 | | 1 | Haoran |
 | 1 | Fiber Collimator | China | 100 | | 1 | Haoran |
 | 14 | Baseplates | openUC2 | 3 | | 14 | |
 | 8 | Cubes | openUC2 | 5 | | 10 | |
 | 1 | Solid baseplate (aluminium) | openUC2 | - | | 1 | |
 | 1 | Excitation filter (Thorlabs) | Thorlabs | 120 | | 1 | Link |
Inserts | 1 | Fiber Collimator Mount | openUC2 | 5 | | 1 | |
 | 1 | Cylindrical Lens Mount | openUC2 | 5 | | 1 | |
 | 1 | 45° Mirror Mount (kinematic) | openUC2 | 25 | | 1 | |
 | 1 | RMS Lens Mount | openUC2 | 5 | | 1 | |
 | 1 | Sample mount (printed) | openUC2 | 30 | | 1 | |
 | 1 | Base for XYZ Stage | openUC2 | 2 | | 1 | |
 | 1 | Sample mount for XYZ Stage | openUC2 | 15 | | 1 | |
 | 1 | Holder for Z-stage, motorized | openUC2 | 30 | | 1 | |
 | 1 | Holder for Tubelens | openUC2 | 10 | | 1 | |
 | 1 | Holder for UC2 Electronics | openUC2 | 30 | | 1 | |
Electronics | 1 | Electronics, power supply, stepper driver | openUC2 | 100 | | 1 | |
 | 2 | USB cables (camera, micro) | Germany | 60 | | 2 | |
 | 1 | Playstation Controller | Germany | 50 | | 1 | |
 | 1 | Box + foam insert | openUC2 | 100 | | 1 | Link |
Labour & Shipping | - | Labour + Shipping | - | 500 | | 1 | |
TOTAL | - | - | - | 2790 | | - | |

    The 3D printing files can be found here

    Conclusion

    Congratulations! You have successfully built a light-sheet microscope using the UC2 modular toolbox. This powerful technique allows you to acquire high-resolution three-dimensional images of samples like zebrafishes. With the ability to perform optical sectioning and minimal phototoxicity, light-sheet microscopy is a valuable tool for studying biological structures in 3D. You can now explore the fascinating world of 3D biological imaging and discover new insights into the complexities of life at the microscopic level. Happy imaging!


    5. Use of filters
    • When using a correct filter between the Z-stage and the camera, it's possible to observe a fluorescent image of the sample.
    • Without filters you capture only the scattering image.

    Imaging with the light sheet microscope

    • The focus of the detection path can be finely adjusted using the Z-stage motor (GUI - Z).
• Z-series can be acquired by moving the sample (GUI - X) through the focused light-sheet plane. Move the sample stage in both directions, using the lens tissue as a sample, to observe how the camera image changes.
    • To acquire an image: Choose "Start experiment" on the right side of the screen, click "Custom" on the top right side and then "Snap" on the bottom right side.
    • To acquire a z-stack use the tomographic mode:

    Results

    What can you see with the simplest possible light sheet setup:

    The result could look like this:

    Zebra fish embryo

    Participate!

    Do you want to show your own results? Do you have ideas for improvements? Let us know!


    Electronics

Here we make use of the ESP32 Wemos D1 R32 microcontroller board in combination with the CNC Shield v3. The wiring of the different components is straightforward: the stepper motors are attached to the stepper drivers, the laser is triggered by the SpinEn pin, and the NeoPixel LED is connected to the Hold pin.

    Flashing the firmware

Go to the website https://youseetoo.github.io/ and choose the CNC board as the hardware configuration to flash the latest version of the firmware. The PS3 controller's MAC address has to be set up with the PS Pairing tool. The actual MAC address is printed on the serial monitor while the board is booting up.
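If you prefer to read the boot messages from a script rather than a serial-monitor application, a small pyserial loop is enough. The port name and the 115200 baud rate below are assumptions and may differ on your machine; this is only a sketch, not part of the official tooling.

import serial  # pip install pyserial

# ASSUMPTIONS: adjust the port for your system (e.g. COM3 on Windows); 115200 baud is a
# common ESP32 setting but should be checked against the firmware you flashed.
with serial.Serial("/dev/ttyUSB0", 115200, timeout=1) as port:
    for _ in range(100):                                   # read a few seconds of boot output
        line = port.readline().decode(errors="ignore").strip()
        if line:
            print(line)                                    # the MAC address appears in this log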

Figure caption (STORM imaging results): Scale bar represents 10 µm. Two CCPs have been zoomed in to plot the profiles along the red transparent line. Scale bar for the magnified regions of interest represents 200 nm.

    Mechanical stability of the setup

    See the mechanical stability section of the repository.

    Wide-field imaging, Live-cell imaging, Single molecule applications

    See the Results section of the repository.

    Get Involved

    This project is open so that anyone can get involved. You don't even have to learn CAD designing or programming. Find ways you can contribute in CONTRIBUTING

    License and Collaboration

This project is open-source and is released under the CERN open hardware license. Our aim is to make the kits commercially available. We encourage everyone who is using our Toolbox to share their results and ideas, so that the Toolbox keeps improving. It should serve as an easy-to-use and easy-to-access general-purpose building block solution for the area of STEAM education. All the design files are generally available for free, but we would like to hear from you how it is going.

You're free to fork the project and enhance it. If you have any suggestions to improve it or want to add additional functions, make a pull request or file an issue.

    Please find the type of licenses here

    REMARK: All files have been designed using Autodesk Inventor 2019 (EDUCATION)

    Collaborating

    If you find this project useful, please like this repository, follow us on Twitter and cite the webpage or the publication! :-)


    Software

For the control and acquisition software, we use ImSwitch. This is an open-source software centered around Napari as a multi-layer viewer and a rich framework for Qt-based widgets. We make use of the open-source localization framework "microEye".

    Installation

    For the installation we advise you to have a look at the ImSwitch repository here https://github.com/kasasxav/ImSwitch/

    After setting up ImSwitch, you can enable STORM reconstruction in real time using the MicroEye Plugin by adding the following configuration to the ImSwitch config file that is located in ~/Documents/ImSwitchConfig/config/imcontrol_options.json

    {
    "setupFileName": "example_uc2_storm_alliedvision.json",
    "recording": {
    "outputFolder": "./ImSwitch/ImSwitch/recordings",
    "includeDateInOutputFolder": true
    },
    "watcher": {
    "outputFolder": "/Users/bene/ImSwitchConfig/scripts"
    }
    }

    The setup file with the actual hardware configuration can be placed here:

    ~/Documents/ImSwitchConfig/imcontrol_setups/example_uc2_storm_alliedvision.json

    {
    "positioners": {
    "ESP32Stage": {
    "managerName": "ESP32StageManager",
    "managerProperties": {
    "rs232device": "ESP32"
    },
    "axes": [
    "X",
    "Y",
    "Z"
    ],
    "forScanning": true,
    "forPositioning": true
    }
    },
    "rs232devices": {
    "ESP32": {
    "managerName": "ESP32Manager",
    "managerProperties": {
    "host_": "192.168.43.129",
    "serialport_windows": "COM5",
    "serialport": "/dev/cu./dev/cu.SLAB_USBtoUART"
    }
    }
    },
    "lasers": {
    "488 Laser": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": 1,
    "filter_change": false,
    "laser_despeckle_period": 10,
    "laser_despeckle_amplitude": 0
    },
    "wavelength": 488,
    "valueRangeMin": 0,
    "valueRangeMax": 1024
    },
    "635 Laser": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": 2,
    "filter_change": false,
    "laser_despeckle_period": 10,
    "laser_despeckle_amplitude": 0
    },
    "wavelength": 635,
    "valueRangeMin": 0,
    "valueRangeMax": 1024
    },
    "LED": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": "LED",
    "filter_change": false,
    "filter_axis": 3,
    "filter_position": 32000,
    "filter_position_init": -0
    },
    "wavelength": 635,
    "valueRangeMin": 0,
    "valueRangeMax": 255
    }
    },
    "detectors": {
    "WidefieldCamera": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "AVManager",
    "managerProperties": {
    "cameraListIndex": 1,
    "mocktype": "STORM",
    "mockstackpath": "/Users/bene/Downloads/New_SMLM_datasets/ROI_cos7MT_AF647fluopaint.tif",
    "avcam": {
    "exposure": 0,
    "gain": 0,
    "blacklevel": 100,
    "image_width": 1000,
    "image_height": 1000,
    "pixel_format": "Mono12"
    }
    },
    "forAcquisition": true,
    "forFocusLock": false
    }
    },
    "rois": {
    "Full chip": {
    "x": 600,
    "y": 600,
    "w": 1200,
    "h": 1200
    }
    },
    "LEDMatrixs": {
    "ESP32 LEDMatrix": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDMatrixManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "Nx": 4,
    "Ny": 4
    },
    "wavelength": 488,
    "valueRangeMin": 0,
    "valueRangeMax": 32768
    }
    },
    "autofocus": {
    "camera": "WidefieldCamera",
    "positioner": "ESP32Stage",
    "updateFreq": 10,
    "frameCropx": 780,
    "frameCropy": 400,
    "frameCropw": 500,
    "frameCroph": 100
    },
    "availableWidgets": [
    "Settings",
    "View",
    "Recording",
    "Image",
    "Laser",
    "Positioner",
    "Autofocus",
    "STORMRecon"
    ]
    }

    ImSwitch in Action

Here you can find a YouTube tour showing how to set up everything and what it can do.

    https://www.youtube.com/watch?v=r8f-wmeq5i0

From the mechanical stability measurement: the sigma value of the localized beads over the measurement duration correlates with the defocusing of the beads, i.e. low changes in sigma suggest small fluctuations of the sample's axial position.

    Aligning the Beamsplitter Cube

The new xyz microscope has a special 2x1 cube that holds the fluorescence optics. Inside, the beamsplitter is mounted kinematically and can be adjusted with 3 set screws. It's important that the fiber-coupled laser is focused / reimaged in the back focal plane of the objective lens. Therefore, we have created a little tutorial to get you started on how this works.
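A quick sanity check for this reimaging condition is the thin-lens equation 1/f = 1/s_o + 1/s_i. The focal length and distance below are assumptions purely to illustrate how you could estimate where the laser focus ends up relative to the back focal plane; they are not values taken from this page.

def image_distance(f_mm, object_distance_mm):
    # Thin-lens equation solved for the image distance s_i
    return 1.0 / (1.0 / f_mm - 1.0 / object_distance_mm)

f_lens = 50.0      # ASSUMED focal length of the lens in front of the beamsplitter cube (mm)
s_object = 200.0   # ASSUMED distance from the fiber focus to that lens (mm)
s_image = image_distance(f_lens, s_object)
print(f"laser is refocused {s_image:.1f} mm behind the lens")  # compare with the BFP position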


    openUC2 FiveD v1

    Unpacking the microscope

    The hardcover plastic case contains all you need for the microscope:

    • USB micro cable
    • USB3 camera cable
    • 12V power-supply
    • Sweet treat (optional ;D)
    • The actual microscope
    • The objective lens
    • The Illumination unit
    • A heavy Box

    The actual Box looks like this:

    Optional Please also find the treat and make sure you provide yourself with enough sugar throughout this unpacking routine :-)

    The foam holds the microscope in place (the actual colour may differ from what you may see):

    Remove the foam parts (please keep them for later reuse) to end up like this here:

    Getting started

    Mounting the illumination unit

    For this you need a 2.5mm Hex key and the M3 cylindrical screws. Mount the LED Arm like so:

    It should look like this:

    Wiring up the microscope

    First of all we need to wire up the microscope. For this we will start with the 12V power supply. Unfortunately the powersocket is inside the case, hence you have to first eat some candy in order to better find the spot ;-)

    The USB Cable is permanently mounted to the ESP32 UC2e unit:

    Note: Please make sure you have sufficient USB Power. In case the full LED array is turning on, it may happen that the ESP's voltage drops and the USB serial connection fails. A reconnect will help.

    The same holds true for the USB connection to the microcontroller board. You need to hook it up like that:

Once done, we continue with inserting the objective lens. The lens may already be inserted; in that case, just check that it is centered correctly.

    Wire up the microscope to your computer

    In order to get the microscope working, we first need to install additional drivers. For the Daheng Camera, this would be:

    For additional information and an in-depth explanation for the UC2e system, please have a look here


    Carefully take the lens out:

    In motion:

Vertical Operation

    Troubleshoot

We learn from mistakes. So let's start learning. The system is fully open, meaning you can adjust and change the vast majority of the parts on your own. The entire system consists of the openUC2 frame / skeleton and the 3D printed housing that shields it from dust and light. By removing all M3 cylindrical screws, you can detach the housing from the inner structure to eventually repair or alter the system.

    You can find a full description of how to dissassemble the microscope here: https://openuc2.github.io/docs/PRODUCTION/INVESTIGATOR/ProductionXYZMicroscope

    In Action

    We scanned arabidopsis in darkfield (LEDs >9 on):

Connecting the microscope to the browser and controlling it

    We encourage you to use the UC2ified ImSwitch software to control the microscope. You can find it in this repository: https://github.com/openUC2/ImSwitch/

    However, if you want to quick-start the microscope and see if it works, you can open your browser and use the WEB-Serial interface to interact with the microscope.

    Go to https://youseetoo.github.io/ and connect to your board (most right option saying ESP32 DEV-based UC2 standalone board V2). Select the COM Port which is holding the ESP32 and hit the LOG option, once the dialog opens. The alternative option will help you updating the firmware on the device. An in-depth explanation on how the firmware works can be found here.

    In general, you need to send JSON strings in order to control the system. The strings relevant for the Z-microscope are:

    Home the XY-axis

It's important to always home the motors in order to prevent them from getting stuck in an end position (ATTENTION!). The following string will move the motor until the endstop is hit. Afterwards it will release the switch:

    {"task":"/home_act", "home": {"steppers": [{"stepperid":1, "timeout": 2000, "speed": 15000, "direction":1, "endposrelease":3000}]}}

    and

    {"task":"/home_act", "home": {"steppers": [{"stepperid":2, "timeout": 2000, "speed": 15000, "direction":1, "endposrelease":3000}]}}

    Afterwards the internal position is set to 0. You can check that by entering:

    {"task": "/motor_get"}

    Move the Z-axis:

The motor (NEMA12) with 200 steps/revolution runs with 16 microsteps and drives a leadscrew with 1 mm/revolution. Hence, one step corresponds to 312.5 nm. Running the motor can be issued with the following command:

    {"task":"/motor_act",
    "motor":
    {
    "steppers": [
    { "stepperid": 3, "position": 1000, "speed": 15000, "isabs": 3, "isaccel":0}
    ]
    }
    }
• stepperid: 3 corresponds to the Z-axis
• position: steps to go (not physical units!)
• speed: steps / minute (do not exceed 20000)
• isabs: absolute or relative motion
• isaccel: for now, use only non-accelerated motion!
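The same JSON strings can also be sent from a short Python script instead of the browser terminal. The command payloads below are exactly the ones documented above; the serial port name and the 115200 baud rate are assumptions that you may need to adapt, and this sketch is not part of ImSwitch itself.

import json
import time
import serial  # pip install pyserial

PORT = "/dev/ttyUSB0"   # ASSUMPTION: adjust for your system (e.g. COM5 on Windows)
BAUD = 115200           # ASSUMPTION: common ESP32 UART speed

def send(ser, payload):
    # Send one JSON command to the UC2 board and print whatever it answers
    ser.write((json.dumps(payload) + "\n").encode())
    time.sleep(0.5)
    print(ser.read_all().decode(errors="ignore"))

with serial.Serial(PORT, BAUD, timeout=1) as ser:
    # home the X axis (stepperid 1), then query the internal positions
    send(ser, {"task": "/home_act", "home": {"steppers": [
        {"stepperid": 1, "timeout": 2000, "speed": 15000,
         "direction": 1, "endposrelease": 3000}]}})
    send(ser, {"task": "/motor_get"})
    # move the Z axis (stepperid 3) by 1000 steps, relative and non-accelerated
    send(ser, {"task": "/motor_act", "motor": {"steppers": [
        {"stepperid": 3, "position": 1000, "speed": 15000,
         "isabs": 0, "isaccel": 0}]}})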

    Safety

    • in case of shattered glass, make sure you don't cut yourself
    • Make sure you don't hurt yourself
    • The moving parts can potentially hurt your finger
    • The electronics - if used in a wrong way - can harm you
    • edges may be sharp, make sure you don't cut yourself
The second spring ball now plays along.

    The last two fixed balls find their groove almost immediately before the front ball moves up its ramp to the pins

    Final position. All fixed balls in their "pin yokes" and both spring balls press the optics module against them.

    Safety

    • in case of shattered glass, make sure you don't cut yourself
    • Make sure you don't hurt yourself
    • The moving parts can potentially hurt your finger
    • The electronics - if used in a wrong way - can harm you
    • edges may be sharp, make sure you don't cut yourself

    Histo Scanner Plugin Documentation

    Welcome to the documentation page for the Histo Scanner Plugin, a powerful tool for scanning large areas and stitching images onto a large canvas. This page provides detailed information on how to configure and use the plugin effectively.

    Overview

    The Histoscanner Plugin integrates with the ImSwitch widget and controller to facilitate the scanning of large sample areas. Users can select a sample geometry and initiate scanning, which captures images and stitches them together to form a comprehensive view.

    Initial Setup

    Before starting a scan, ensure the following settings are configured correctly:

    • Pixel Size: Set in the setup.json file. This size must be calibrated, possibly using a ruler.
    • Step Size of Axis: Also set in the setup.json. It typically depends on the steps/mm defined by the leadscrew.
    • Sample Configuration File: An example file can be found here.

    Scanning Process

The microscope will compute the scan area and the necessary scan step size on its own and will perform a snake scan. Alternatively, you can provide a list of coordinates, as sketched below.
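If you go the coordinate-list route, a snake (meander) pattern can be generated in a few lines. The scan area, field of view and overlap below are illustrative assumptions; only the 0.2257 µm effective pixel size is taken from the detector configuration further down this page.

def snake_scan(width_um, height_um, fov_x_um, fov_y_um, overlap=0.1):
    # Return a snake/meander list of (x, y) stage targets covering a rectangle
    step_x = fov_x_um * (1.0 - overlap)
    step_y = fov_y_um * (1.0 - overlap)
    n_x = int(width_um // step_x) + 1
    n_y = int(height_um // step_y) + 1
    coords = []
    for iy in range(n_y):
        xs = range(n_x) if iy % 2 == 0 else reversed(range(n_x))  # alternate direction per row
        coords.extend((ix * step_x, iy * step_y) for ix in xs)
    return coords

# Example: 1000 x 1000 pixel frames at 0.2257 um/pixel, covering an ASSUMED 5 x 5 mm area
fov_um = 1000 * 0.2257
positions = snake_scan(5000, 5000, fov_um, fov_um, overlap=0.15)
print(len(positions), positions[:3])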

    Once the scan is successfully initiated, the final output is displayed in a downscaled version on napari to conserve memory.

    ImSwitch Configuration

    The configuration settings for the detector and stage are crucial. Here are the JSON settings for both:

    For the Stage

      "positioners": {
    "ESP32Stage": {
    "managerName": "ESP32StageManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "isEnable": true,
    "enableauto": false,
    "stepsizeX": -0.3125,
    "stepsizeY": -0.3125,
    "stepsizeZ": 0.3125,
    "homeSpeedX": 15000,
    "homeSpeedY": 15000,
    "homeSpeedZ": 15000,
    "isDualaxis": true,
    "homeDirectionX": 1,
    "backlashXOld": 15,
    "backlashYOld": 40,
    "backlashX": 0,
    "backlashY": 0,
    "homeEndstoppolarityY": 0,
    "homeDirectionY": -1,
    "homeDirectionZ": 0,
    "homeXenabled": 1,
    "homeYenabled": 1,
    "homeZenabled": 0,
    "initialSpeed": {
    "X": 15000,
    "Y": 15000,
    "Z": 15000
    }
    },
    "axes": [
    "X",
    "Y",
    "Z"
    ],
    "forScanning": true,
    "forPositioning": true
    }
    }

    For the Detector

      "detectors": {
    "WidefieldCamera": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "HikCamManager",
    "managerProperties": {
    "isRGB": 1,
    "cameraListIndex": 0,
    "cameraEffPixelsize": 0.2257,
    "hikcam": {
    "exposure": 0,
    "gain": 0,
    "blacklevel": 100,
    "image_width": 1000,
    "image_height": 1000
    }
    },
    "forAcquisition": true,
    "forFocusLock": true
}
}

    File Handling

• Storing Metadata: All metadata is stored in the OME.TIF format (a minimal Python reading sketch follows this list).
• Opening in Fiji: Files can be easily opened and stitched in Fiji by importing them as OME.TIF.
• Opening in ASHLAR: Use the script developed during the openUC2 hackathon, available here, as a starting point for handling files in Ashlar.
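For a quick programmatic check of a recorded tile, the tifffile package can read both the pixel data and the embedded OME metadata. The file name below is a placeholder, and the package itself is an assumption (it is not necessarily shipped with your ImSwitch installation).

import tifffile  # pip install tifffile

path = "histoscan_tile.ome.tif"    # ASSUMPTION: replace with one of your recorded tiles
with tifffile.TiffFile(path) as tif:
    data = tif.asarray()           # pixel data as a numpy array
    ome_xml = tif.ome_metadata     # OME-XML string (stage positions, channels, ...)
print(data.shape)
print(ome_xml[:200] if ome_xml else "no OME metadata found")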

    Hardware/Software Setup

    Correct orientation of the stage coordinates and camera coordinates is essential. The configuration ensures that the camera orientation matches the stage scanning positions.

In order to have the correct orientation, it's important that the stage coordinates and the camera coordinates match. The image below shows how the camera has to be oriented w.r.t. the stage scanning positions.
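As a purely illustrative sketch of the idea (not a description of what ImSwitch does internally), a camera frame can be rotated/flipped into the stage coordinate system before stitching; the rotation and flip values below are assumptions that depend on how your camera is mounted.

import numpy as np

def to_stage_coordinates(frame, rotate_quarters=0, flip_x=False, flip_y=False):
    # Rotate/flip a camera frame so that +x/+y in the image match the stage axes
    out = np.rot90(frame, k=rotate_quarters)
    if flip_x:
        out = np.flip(out, axis=1)
    if flip_y:
        out = np.flip(out, axis=0)
    return out

# Example: camera ASSUMED to be mounted rotated by 90 degrees and mirrored along x
frame = np.zeros((1000, 1000), dtype=np.uint16)
aligned = to_stage_coordinates(frame, rotate_quarters=1, flip_x=True)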

    Tutorials and Demonstrations

    • Tutorial on Matching Axes: A tutorial explaining the matching of different axes is available on YouTube.
    • Full Plugin in Action: Watch the plugin in action here.

Feel free to reach out with any queries or suggestions to enhance this documentation. Happy scanning with Histo Scanner!

    Tutorial that explains how the different axes can be matched

    The Full plugin in action


    ROI Scanner

    Starting ImSwitch on Ubuntu and Start the ROI Scanner

    First of all: Open the terminal. Type the following (all case sensitive):

    conda activate imswitch

    sudo chown user:user /dev/ttyUSB0 # where user is the current user you're logged into (then enter password)

    The USB port may differ, so perhaps also try this:

    sudo chown veo:veo /dev/ttyUSB1
    sudo chown veo:veo /dev/ttyUSB2

    Then:

    python -m imswitch

    The images are stored in the working directory of the terminal.

The cheek cells are relatively thick and cause some additional image degradation (i.e. multiple scattering).

    Using only the green line of the ring improves overall contrast and reduces the artifact

    Multiple cell layers visualized with the UC2 XYZ phase-contrast microscope


    openUC2 Setting up the tube lens

    Introduction and Explanation

    The tube lens in the openUC2 setup is a critical component responsible for transforming an object that is far away (approximately infinitely far away) into an image on the camera sensor. Proper calibration of the tube lens is essential to ensure that it produces a sharp and clear image. In this documentation, we will guide you through the process of setting up the tube lens for your openUC2 system.
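As a reminder of the underlying relation, the magnification of such an arrangement is the ratio of tube-lens to objective focal length, and the effective pixel size is the sensor pixel pitch divided by that magnification. All numbers below are assumptions for illustration; check the focal lengths and pixel pitch of your own parts.

f_tube_mm = 100.0       # ASSUMED focal length of the CCTV lens used as tube lens
f_objective_mm = 18.0   # ASSUMED: a 10x objective designed for a 180 mm tube length
pixel_pitch_um = 1.85   # ASSUMED sensor pixel pitch of the camera

magnification = f_tube_mm / f_objective_mm
effective_pixel_um = pixel_pitch_um / magnification
print(f"magnification: {magnification:.2f}x, effective pixel: {effective_pixel_um:.3f} um")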

    To achieve optimal performance, you will need to follow these key steps:

    1. Adjust the CCTV lens to focus on objects at infinity.
    2. Fully open the aperture to maximize light collection.

    Additionally, there is a small tool called "the crown" that facilitates the alignment of the ring components. You can download "the crown" tool from this link.

    Procedure

    Follow these steps to set up the tube lens for your openUC2 system:

    1. Mount the Camera

    Begin by mounting the camera using the C-mount onto the CCTV lens. Ensure that you include the spacer ring to align the image plane of the CCTV lens with the sensor plane of the camera.

    2. Adjust the Image Plane

    Make sure that the camera is securely attached to the CCTV lens, preventing it from turning or becoming loose.

    3. Install "The Crown"

    Insert "the crown" tool between the camera and the CCTV lens. This tool will assist in aligning the rings correctly.

    4. Secure the Camera

    Turn the camera clockwise, typically 2-3 full rotations, to fully secure it to the CCTV lens.

    5. Open the Aperture

    Rotate the camera counter-clockwise until the aperture is fully open. Ensure that the camera is parallel to the ground during this step to maximize light intake.

    6. Focus to Infinity

    Now, use "the crown" tool to turn the outer focus ring of the CCTV lens counter-clockwise. This adjustment will ensure that the CCTV lens is focused at infinity.

    By following these steps, you will successfully set up the tube lens for your openUC2 system, allowing it to capture sharp and well-focused images. Remember to check and fine-tune your settings as needed to achieve the best results for your specific imaging requirements.


    Unpack the openUC2 Z-Microscope

    Unpacking the microscope

    The hardcover plastic case contains all you need for the microscope:

    • USB micro cable
    • USB3 camera cable
    • 12V power-supply
    • Sweet treat
    • The actual microscope
    • The objective lens

    The actual Box looks like this:

    Please also find the treat and make sure you provide yourself with enough sugar throughout this unpacking routine :-)

    The foam holds the microscope in place (the actual colour may differ from what you may see):

    The cables are hidden behind the foam:

    Check if you find the content of the box:

    Getting started

    First of all we need to wire up the microscope. For this we will start with the 12V power supply. Unfortunately the powersocket is inside the case, hence you have to first eat some candy in order to better find the spot ;-)

    The same holds true for the USB connection to the microcontroller board. You need to hook it up like that:

Once done, we continue with inserting the objective lens. The lens may already be inserted; in that case, just check that it is centered correctly.

    The microscope should look like this:

    Wire up the microscope to your computer

    In order to get the microscope working, we first need to install additional drivers. For the Daheng Camera, this would be:

    For additional information and an in-depth explanation for the UC2e system, please have a look here

    Troubleshoot

We learn from mistakes. So let's start learning. The system is fully open, meaning you can adjust and change the vast majority of the parts on your own. The entire system consists of the openUC2 frame / skeleton and the 3D printed housing that shields it from dust and light. By removing all M3 cylindrical screws, you can detach the housing from the inner structure to eventually repair or alter the system.

A 2.5 mm hex key will help you for finishing this job:

Lift the lid and the microscope will follow (make sure all cables are detached):

    Now you can start working on the "inner bits":

    In Action

    Here you see the extended focussing of the objective lens:

Connecting the microscope to the browser and controlling it

    We encourage you to use the UC2ified ImSwitch software to control the microscope. You can find it in this repository: https://github.com/openUC2/ImSwitch/

    However, if you want to quick-start the microscope and see if it works, you can open your browser and use the WEB-Serial interface to interact with the microscope.

    Go to https://youseetoo.github.io/ and connect to your board (most right option saying ESP32 DEV-based UC2 standalone board V2). Select the COM Port which is holding the ESP32 and hit the LOG option, once the dialog opens. The alternative option will help you updating the firmware on the device. An in-depth explanation on how the firmware works can be found here.

    In general, you need to send JSON strings in order to control the system. The strings relevant for the Z-microscope are:

    Home the Z-axis

It's important to always home the motors in order to prevent them from getting stuck in an end position (ATTENTION!). The following string will move the motor until the endstop is hit. Afterwards it will release the switch:

    {"task":"/home_act", "home": {"steppers": [{"stepperid":3, "timeout": 2000, "speed": 15000, "direction":1, "endposrelease":3000}]}}

    Afterwards the internal position is set to 0. You can check that by entering:

    {"task": "/motor_get"}

    Move the Z-axis:

The motor (NEMA12) with 200 steps/revolution runs with 16 microsteps and drives a leadscrew with 1 mm/revolution. Hence, one step corresponds to 312.5 nm. Running the motor can be issued with the following command:

    {"task":"/motor_act",
    "motor":
    {
    "steppers": [
    { "stepperid": 3, "position": 1000, "speed": 15000, "isabs": 3, "isaccel":0}
    ]
    }
    }
• stepperid: 3 corresponds to the Z-axis
• position: steps to go (not physical units!)
• speed: steps / minute (do not exceed 20000)
• isabs: absolute or relative motion
• isaccel: for now, use only non-accelerated motion!
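Because the position field is given in steps rather than physical units, a small conversion helper based on the 312.5 nm step size derived above helps avoid mistakes when planning focus moves; the helper below is only a suggestion, not part of the firmware or ImSwitch.

STEP_SIZE_UM = 1000.0 / (200 * 16)   # 1 mm lead / (200 full steps x 16 microsteps) = 0.3125 um

def um_to_steps(distance_um):
    # Convert a physical Z travel in micrometres into motor steps for stepperid 3
    return int(round(distance_um / STEP_SIZE_UM))

def steps_to_um(steps):
    return steps * STEP_SIZE_UM

print(um_to_steps(100))    # 100 um of focus travel -> 320 steps
print(steps_to_um(1000))   # the 1000-step example above corresponds to 312.5 um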

    Safety

    • in case of shattered glass, make sure you don't cut yourself
    • Make sure you don't hurt yourself
    • The moving parts can potentially hurt your finger
    • The electronics - if used in a wrong way - can harm you
    • edges may be sharp, make sure you don't cut yourself

    Prepare the Z-stage

    The documentation for the motorized 25mm Z-stage can be found here: https://openuc2.github.io/docs/PRODUCTION/PG_12_STAGE_Z_NEMA

    Once done, lock the Stage with the Puzzle piece with M5x8 worm screws:

    Prepare Electronics

    Bill of Material

    • UC2e v2 electronics
    • 3x A4988 Stepper driver
    • 12V power supply
    • USB micro cable
    • 3D printed case
    • 2x puzzle pieces
    • 8 M5x8 thread-only screws
    • 4x M3x8mm screws

    Assembly

Attach the electronics board to the 3D printed assembly and tighten it with the M3 screws (cylindrical head, DIN 912). Attach the puzzle pieces to the distal ends of the assembly and lock them with the M5 screws. For this, the still-closed holes have to be opened by "drilling" them through.

    Prepare Triangle Structure

    Tubelens

    Bill of Material

    • Berrybase 100mm CCTV Lens
    • Daheng Vision IMX226 sensor
    • USB 3 Camera Cable
• 2x Puzzle pieces
    • 8x M5x8 mm worm screw
    • 4x M3x18mm screw

    Assembly

    Adding the Baseplate

    Endstops and Illumination

    Skeleton

    Fully Assembled

    Fluo Extension

    Improvements

    Stage does not run smoothly

You can release the pressure on the linear bearings by loosening the screws carefully. Make sure you don't introduce unnecessary play. The stage works with two v-grooves and balls in between.

    Additional images (have to be sorted)

    Safety

    TODO: Add additional information!

    • in case of shattered glass, make sure you don't cut yourself
    • Make sure you don't hurt yourself
    • The moving parts can potentially hurt your finger
    • The electronics - if used in a wrong way - can harm you
    • edges may be sharp, make sure you don't cut yourself

    Apertures

This page describes how to assemble the aperture module.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl
    #01-012BaseSpring LocksPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011Spring0,20€NONE
    #01-011Threaded Inserts M30,20€NONE
    #01-011Screw Ball head0,20€NONE

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    Design files

    The original design files are in the INVENTOR folder.


BEAMSPLITTER (PARTIALLY TRANSPARENT MIRROR, 45°)

This page describes how to assemble the partially transparent mirror (45°) module. It uses a front-surface 50% mirror.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-01150% MirrorAstromedia 40x30mm PArtially Transparent Mirror8,00€Astromedia

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    caution

    Insert the beam splitter in such a way that the mirroring/coated surface points away from the printed part!

    Design files

    The original design files are in the INVENTOR folder.


    USB Camera

    Duration: 1

    This page describes how to assemble the camera module. It has two different variations:

    • IMX214 (Arducam, LINK)
    • IMX179 (Waveshare, LINK)

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    TypeDetailsLink
    Camera holderholds IMX179/214Part.stl

    Additional parts

    This is used in the current version of the setup

    TypeDetailsPriceLink
    USB CameraArducam USB IMX21440 €Amazon
    or---------
    USB CameraArducam USB IMX21440 €Amazon
    Mounting screwsDIN912 M3x12mm Screws0.40 €Würth

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    IMX219 (Arducam)

    STEP 1

    All parts you need to assemble the module:

    Remove the lens (M12/cellphone) from the camera board.

    STEP 2

Use the DIN912 M3x12mm screws to mount the camera securely. ATTENTION: Use the decentered holes such that the camera is placed in the insert's center!

    STEP 3

    Add the insert to the cube, close it and store the cable safely.

    IMX179 (Waveshare)

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    Remove the lens (M12) from the camera board using a cross key.

    STEP 3

    Use the DIN912 M3x12mm screws to mount the camera securely. ATTENTION: Use the centered screws to have the camera's lens in the center of the module!

    STEP 4

To remove the lens, take a cloth/tissue and pliers and knock it off with some force. Don't break the part!

    STEP 5

    Add the part to the cube and you're done.

    Design files

    The original design files are in the INVENTOR folder.


    Eyepiece

    Duration: 1

    This page describes how to assemble the Eyepiece module. It uses a standard eyepiece with 22mm diameter.

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    TypeDetailsLink
    Holderholds EyepiecePart.stl

    Additional parts

    This is used in the current version of the setup

    TypeDetailsPriceLink
    Eyepiece10x, 18mm Eyepiece10 €Aliexpress

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    Remove XX

    STEP 2

    Use Hotglue to permanently mount the eyepiece to the insert.

    STEP 3

    Add the insert to the cube, close it and store it safely.

    Design files

    The original design files are in the INVENTOR folder.


    KINEMATIC MIRROR (45°)

This page describes how to assemble the kinematic mirror (45°) module. It uses a front-surface mirror that can be tuned in 3 axes.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds screwsPart.stl
    #01-021Mirror Mountholds mirror and is movablePart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011FS MirrorAstromedia 40x30mm Frontsurface Mirror5,00€Astromedia
    #01-024Screw (orings)M3x12, DIN9125,00€Astromedia
    #01-033Screw (Pushing)M3x20, NOT DECIDED!15,00€Link
    #01-042O-Ringr=8mm0,10€Link
    #01-051Cube5,00€Link

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    STEP 4

    Design files

    The original design files are in the INVENTOR folder.


    KINEMATIC MIRROR (90°)

This page describes how to assemble the kinematic mirror (90°) module. It uses a front-surface mirror that can be tuned in 3 axes.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds screwsPart.stl
    #01-021Mirror Mountholds mirror and is movablePart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011FS MirrorAstromedia 40x30mm Frontsurface Mirror5,00€Astromedia
    #01-024Screw (orings)M3x12, DIN9125,00€Astromedia
    #01-033Screw (Pushing)M3x20, NOT DECIDED!15,00€Link
    #01-042O-Ringr=8mm0,10€Link
    #01-051Cube5,00€Link

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    STEP 4

    STEP 5

    STEP 6

    Design files

    The original design files are in the INVENTOR folder.


    Kinematic XY Mount

    This page describes how to assemble the kinematic XY mount for moving a laser/ pinhole.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl
    #01-012BaseSpring LocksPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011Spring0,20€NONE
    #01-011Threaded Inserts M30,20€NONE
    #01-011Screw Ball head0,20€NONE

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    STEP 4

    STEP 6

    Design files

    The original design files are in the INVENTOR folder.


    LENS

    Duration: 1

This page describes how to assemble the Lens module. It uses a biconvex/biconcave glass lens, 40 mm in diameter, with varying focal length.

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDTypeDetailsLink
    #01-01Holderholds LensPart.stl
    #01-02Lidfixes lens using screw mechanismPart.stl

    Additional parts

    This is used in the current version of the setup

    IDTypeDetailsPriceLink
    #01-03Lens (50mm)f'=50mm, Diameter 40mm2 €Aliexpress
    or
    #01-04Lens (100mm)f'=100mm, Diameter 40mm2 €Aliexpress
    or
    #01-05Lens (-50mm)f'=-50mm, Diameter 40mm2 €Aliexpress

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

Thread in the round piece and fix the lens. Make sure you use the right ring for the right lens!

    caution

    Clean the lenses with a cotton cloth.

    Design files

    The original design files are in the INVENTOR folder.


    Mirror (45°)

    This page describes how to assemble the standard fold mirror (45°) module. It uses a Frontsurface mirror.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011MirrorAstromedia 40x30mm PArtially Transparent Mirror8,00€Astromedia

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    Design files

    The original design files are in the INVENTOR folder.


    Polarization Rotator

This page describes how to assemble the polarization rotator module.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl
    #01-012BaseSpring LocksPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011Spring0,20€NONE
    #01-011Threaded Inserts M30,20€NONE
    #01-011Screw Ball head0,20€NONE

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    Design files

    The original design files are in the INVENTOR folder.


    Sample Holder

This page describes how to assemble the sample holder module.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl
    #01-012BaseSpring LocksPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011Spring0,20€NONE
    #01-011Threaded Inserts M30,20€NONE
    #01-011Screw Ball head0,20€NONE

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    :::alert

This module has been updated to use magnets! Make sure you label the magnets prior to insertion so that the polarity is correct.

    :::

    Design files

    The original design files are in the INVENTOR folder.

diff --git a/docs/PRODUCTION/Modules/STAGE_Z_MANUAL/index.html b/docs/PRODUCTION/Modules/STAGE_Z_MANUAL/index.html

    Kinematic XY Mount / Laser

    This page describes how to assemble the kinematic XY mount for moving a laser/ pinhole.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

| Infill | Layer height | Special Profile | Material |
| --- | --- | --- | --- |
| ~20% | 0.25/0.3 mm | mini/i3 | PLA (black) |

| ID | Amount | Type | Details | Link |
| --- | --- | --- | --- | --- |
| #01-01 | 1 | Base | Holds Mirror | Part.stl |
| #01-01 | 2 | Base | Spring Locks | Part.stl |

    Additional parts

    This is used in the current version of the setup

| ID | Amount | Type | Details | Price | Link |
| --- | --- | --- | --- | --- | --- |
| #01-01 | 1 | Spring | | 0,20 € | NONE |
| #01-01 | 1 | Threaded insert M3 | | 0,20 € | NONE |
| #01-01 | 1 | Screw, ball head | | 0,20 € | NONE |

    Assembly

    Duration: 1

Below we describe how the device can be built and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    STEP 4

    STEP 5

    STEP 6

    STEP 7

    STEP 8

    STEP 9

    STEP 10

    Design files

    The original design files are in the INVENTOR folder.

diff --git a/docs/PRODUCTION/Modules/STAGE_Z_NEMA/index.html b/docs/PRODUCTION/Modules/STAGE_Z_NEMA/index.html

    Z-Stage Motorized NEMA12 25mm

This page describes how to assemble the motorized Z-stage (NEMA12, 25 mm).

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

| Infill | Layer height | Special Profile | Material |
| --- | --- | --- | --- |
| ~20% | 0.25/0.3 mm | mini/i3 | PLA (black) |

| ID | Amount | Type | Details | Link |
| --- | --- | --- | --- | --- |
| #01-01 | 1 | Base | Holds Mirror | Part.stl |
| #01-01 | 2 | Base | Spring Locks | Part.stl |

    Additional parts

    This is used in the current version of the setup

| ID | Amount | Type | Details | Price | Link |
| --- | --- | --- | --- | --- | --- |
| #01-01 | 1 | Spring | | 0,20 € | NONE |
| #01-01 | 1 | Threaded insert M3 | | 0,20 € | NONE |
| #01-01 | 1 | Screw, ball head | | 0,20 € | NONE |

    Assembly

    Duration: 1

Below we describe how the device can be built and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    STEP 4

    STEP 5

    STEP 6

    STEP 7

    STEP 8

    STEP 9

    Design files

    The original design files are in the INVENTOR folder.

diff --git a/docs/PRODUCTION/Modules/TORCH/index.html b/docs/PRODUCTION/Modules/TORCH/index.html

    Torch

This page describes how to assemble the torch (flashlight) module.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

| Infill | Layer height | Special Profile | Material |
| --- | --- | --- | --- |
| ~20% | 0.25/0.3 mm | mini/i3 | PLA (black) |

| ID | Amount | Type | Details | Link |
| --- | --- | --- | --- | --- |
| #01-01 | 1 | Base | Holds Mirror | Part.stl |
| #01-01 | 2 | Base | Spring Locks | Part.stl |

    Additional parts

    This is used in the current version of the setup

| ID | Amount | Type | Details | Price | Link |
| --- | --- | --- | --- | --- | --- |
| #01-01 | 1 | Spring | | 0,20 € | NONE |
| #01-01 | 1 | Threaded insert M3 | | 0,20 € | NONE |
| #01-01 | 1 | Screw, ball head | | 0,20 € | NONE |

    Assembly

    Duration: 1

Below we describe how the device can be built and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    Design files

    The original design files are in the INVENTOR folder.

diff --git a/docs/Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCN/index.html b/docs/Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCN/index.html
Microscope objective: a special lens system that magnifies an object

The following video introduces what is inside a MiniBox (October 2022 version):

What is UC2?

The core element of the UC2 project is a simple cube. The cube consists of two halves and usually houses a slidable insert. The insert can hold various optical components (e.g. lenses, mirrors), which means that each cube can take on a different function depending on the insert that is mounted.

Cube type 1: injection-moulded with plug connection

Baseplate

Baseplate

The cubes can be mounted on a baseplate. The baseplate modules can be put together like a puzzle.

Print UC2 yourself

The UC2 cube can also be 3D printed. It looks the same as the injection-moulded model, but here it consists of a cube lid and a cube body that are held together with screws. The screws are ideal for placing the cube on the magnetic plate. By combining different cube modules, different optical setups can easily be assembled. Each cube can add a new function. There are no limits to your creativity.

Cube type 2: 3D printed with magnetic connection

Cube

Baseplate with magnets

In the 3D-printed baseplate there are small ball magnets onto which the cubes are placed.

Want more cubes? Then you can 3D print them yourself. You can find all the information here

This is how the cubes fit together

Duration: 1 minute

Make sure that the cubes are placed correctly on the plate and are not tilted. Finally, it is important that the inserts sit in the right place.

If you don't see a sharp image, move the insert (e.g. the lens) until you see it clearly. The green arrow in the picture shows you how to do this.

Here you can find a short video that explains the core concept of the cube

Contents of this documentation:

What do the symbols mean?

Experiment: If you see this block, there is something to experiment with! You can place a UC2 cube on this block.
Explanations: If you see this icon, there is something to learn!
Calculations: There is something to calculate here. Take a pen and paper and start puzzling.
Caution: Do not touch the glass surfaces with your fingers!
Cleaning the lenses: If you have already touched a lens, you can clean it with a glasses cloth.

What can a lens do?

Duration: 2 minutes

Take one or more of the cubes with a lens inside and look at the UC2 symbol shown here. Hold the cube in your hand and change the distance between the lens and the image.

diff --git a/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreLens/index.html b/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreLens/index.html

    Lens

    Lenses

    In ray optics, light is represented as a bundle of rays (arrows), which simplifies the physical properties of light. A ray has a direction and is therefore drawn with an arrow. A lens "refracts" the beam, changing its direction.

    The focal length of a lens corresponds to the distance from the lens to the focal plane on which the focal point lies. It is given in millimeters (f = mm).

    Converging (positive) and diverging (negative) lenses

    Converging lenses refract the rays of light traveling parallel to the optical axis at a point called the focal point.

    The diverging lenses refract the rays of light traveling parallel to the optical axis as if they originated from a point called the "virtual" focus.

    Lenses “refract” the rays of light

You can find the focal length of the lens as a printed number on the lens holder. The MiniBOX contains a 100 mm converging lens, two 40 mm converging lenses and a -50 mm diverging lens. The numbers indicate the focal length.

    The converging lens is also called a positive or convex lens. The middle part of the lens is always thicker than the edge.

    The converging lens enlarges the image. The magnification is different for the 40mm lens and the 100mm lens. The image can be upright or inverted.

The diverging lens is sometimes also called a negative or concave lens. The middle part of the lens is always thinner than the edge.

With the diverging lens (here: the -50 mm lens) the image is always reduced and always upright.

    We assume that our lenses are so-called "thin lenses". This means we can consider them as one plane and not care about their thickness. This makes explanations and calculations much easier.

Did the answers raise even more questions? Then continue to find out exactly how lenses work...

    Lens image

Now take the lens cubes. With the matching lens, try to decipher the focal length information in the cubes shown. Move the lens over the writing until it is the same size as the "UC2" text.

    Can you see the text the same size and orientation as the "UC2"? What happens when you change the distance between the lens and the image?

    What happens if you use a lens with the wrong focal length?

    Image of an object through a positive lens

    Let's take the converging lens as an example. We start with an object (green arrow) and see what happens to the rays that start from the top. There are infinitely many rays in all directions, but for drawing the figure the following three rays will suffice:

    1. The centre beam (orange) passes undisturbed through the center of the lens.
    2. The focus ray (yellow) also starts from the tip of the arrow, but goes through the object-side focus at focal length f. After the lens, it continues at the same height, but now parallel to the optical axis.
    3. The parallel beam (red) initially runs parallel to the optical axis, but is then refracted at the lens in such a way that it passes through the focal point on the image side at focal length f.

    The image is formed where all the rays intersect. The principle is used for all points or the rays of an object emanating from them. Depending on which lens is used and depending on the position of the object, the properties of the image change, such as size, orientation and position.

    Image of an object through a negative lens

    In the case of the negative lens, we use the same method to image the ray path. Unlike the case of the converging lens, the image is always reduced and virtual. Magnification depends on the position of the object in front of the lens. Unlike the converging lens, the image is created on the object side and is therefore called a virtual image. You can see it directly with your eyes but not project it onto a screen.

    The way a lens creates an image is predictable by knowing the focal length of that lens. Therefore, a certain distance must be maintained so that you can see the writing with the specified lens on the previous sheet.

    The magnification and the location where the image is formed depend on the focal length of the lens and the distance between the lens and the object.

With the diverging lens (f = -50 mm) you always see a reduced virtual image. A virtual image can only be viewed with the eye. So far we have only seen virtual images.

    The converging lens as a magnifying glass

    Take the UC2 lens cube with focal length f=40mm and use it as a magnifying glass.

    Can you read the small letters through the converging lens? What is written there?

    A lens in action can be found here:

    That's what converging lenses do

    With the converging lenses, the image and the magnification depend on the position of the object.

    If the distance between the object and the lens is more than twice the focal length of the lens, then the image is...

• Inverted
• Side-reversed
• Reduced
• Real

    If the distance between the object and the lens is exactly twice the focal length of the lens, then the image is...

• Inverted
• Side-reversed
• The same size
• Real

    If the distance between the object and the lens is more than the focal length and less than twice the focal length of the lens, then the image is...

• Inverted
• Side-reversed
• Magnified
• Real

    Object distance (g)

    The distance between the object and the lens plane is called g.

Image distance (b)

    The distance between the lens plane and the image formed by the lens is denoted as b.

    The converging lens can produce a real image. The real image can then be seen on a screen.

    That's why the magnifying glass enlarges

    Magnifying glass effect!

    If the distance between the object and the lens is less than the focal length of the lens, then the image is...

• Upright
• Not side-reversed
• Magnified
• Virtual

The magnifying glass is the simplest of all optical devices, since it consists only of a single converging lens with a suitable focal length. Why does the cube with the 50 mm lens enlarge the small text? If the object sits closer to the lens than its focal length (less than 50 mm in front of the lens), the lens creates a virtual image which lies behind the actual object. The eye perceives it as enlarged. Check out the diagram above.

    Calculate the magnification of the magnifying glass using the following formula:
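The formula itself is shown as an image on the original page and is not reproduced here; the standard magnifier relation (assuming the usual 250 mm reference viewing distance introduced below) is:

V = 250 mm / f

For the 40 mm lens this gives V = 250 / 40 ≈ 6×, and for a 50 mm lens V = 5×.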

250 mm is the distance of distinct vision, i.e. the distance between the object and the eye at which most people can read comfortably. More on this later under the "accommodation" of the eye.

    How does a cinema projector work?

Take the UC2 lens cube with focal length f = 40 mm and place it behind the sample holder cube. The distance between the object and the lens (i.e. the object distance g) should be approx. 50 mm. If you now illuminate the object with the flashlight, you will see it sharply at a distance of approx. 200 mm on the wall. A cinema projector has a film strip instead of the object and of course a much stronger light source.

    Use a flashlight (e.g. from your cell phone) as a light source and hold it in front of the object

    Use the image or text on the microscope slide as the object

    How is the image oriented? Slide the lens back and forth in the cube and see when the image is in focus. Find the image for g = 50mm, 60mm, 65mm and measure the distance between the lens and the image.

    How does a cinema projector work?

    Where is the picture?

    When an object is imaged through a converging lens, the position and size of the image depend on the distance (g) of the object to the lens and its focal length (f). The lens equation describes the relationship between image distance (b) and object distance (g):
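The equation referred to above appears as an image on the original page; written out, the (thin) lens equation is:

1/f = 1/g + 1/b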

    How big is the picture?

    The magnification of the object on the screen can easily be calculated using the following formula:
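The magnification formula from the figure, in the same notation (image size B, object size G):

M = B / G = b / g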

    How the projector works

    Check if your observation agrees with the calculation

    Calculate the magnification of the projector for the different values of g and b.

    Our lens has a focal length of f= 40 mm.

For g = 50 mm → b = 200 mm

For g = 60 mm → b = 120 mm

For g = 65 mm → b = 104 mm
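As a quick cross-check of these values, here is a small Python sketch (an illustrative helper, not part of the original material) that solves the thin-lens equation for b and computes the magnification:

```python
# Thin-lens cross-check for the projector example (all lengths in mm)
def image_distance(f, g):
    """Solve 1/f = 1/g + 1/b for the image distance b."""
    return 1.0 / (1.0 / f - 1.0 / g)

f = 40.0  # focal length of the lens used above
for g in (50.0, 60.0, 65.0):
    b = image_distance(f, g)
    print(f"g = {g:.0f} mm -> b = {b:.0f} mm, M = b/g = {b / g:.1f}x")

# Prints:
# g = 50 mm -> b = 200 mm, M = b/g = 4.0x
# g = 60 mm -> b = 120 mm, M = b/g = 2.0x
# g = 65 mm -> b = 104 mm, M = b/g = 1.6x
```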


    The projector always produces an enlarged, inverted (reversed) image. The position of the image and its magnification depend on the position and size of the object.

    Tutorial: Determining the Focal Distance of a Positive Lens

    Materials needed:

• Light source (e.g., the room's illumination)
    • Positive lens
    • Screen (e.g. table, piece of paper, etc.)

    Instructions:

    1. Position the positive lens so that it faces the light source. Align a screen parallel to the focal plane of the lens.
    2. Modify the distance between the lens and the screen.
    3. Carefully observe and record the position at which the light source forms a clear image on the surface of the screen.
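A short note on why this works (an added explanation, standard thin-lens reasoning): for a source that is far away compared with the focal length, the object distance g is very large, so 1/g ≈ 0 and the lens equation 1/f = 1/g + 1/b reduces to b ≈ f. The lens-to-screen distance at which the distant source appears sharpest is therefore approximately the focal length. If the source is nearby, measure both distances and use

f = (g · b) / (g + b)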

diff --git a/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreTelescope/index.html b/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreTelescope/index.html
Search for an object in the distance and use Galileo's telescope to look at it.

    What is a Kepler telescope?

    Set the lenses in the correct positions as shown in the diagram. Then look through the telescope into the distance.

    What does the picture look like? How is the image oriented?

As you look through the telescope, vary the distances between the components until you see a sharp image!

    This is a Kepler telescope

    This type of telescope is often used in astronomy.

    This is how the Kepler telescope works

    What is the magnification of this Kepler telescope?

    Formula for calculating magnification
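The formula in the figure is the standard angular magnification of a Kepler telescope (quoted here, since the image is not reproduced in this text version):

M = f_objective / f_eyepiece

With the lenses listed in the materials below (100 mm objective lens, 50 mm eyepiece lens) this gives M = 100 / 50 = 2×.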

This telescope can achieve a higher magnification than the Galilean telescope, but it produces an inverted image. However, this is not a problem for observing the stars.

The image is always
• Magnified by the factor given by the formula above
• Inverted
• Side-reversed

    The field of view is larger than with the Galileo telescope.


    Tutorial: Kepler's Telescope

    Materials needed:

    • Eight base plates
    • 100 mm positive lens (in cube)
    • 50 mm positive lens (in cube)
    • Two empty cubes

    Diagram (side view):

    Instructions for assembling Kepler's telescope:

    Step 1: Align the cubes

Align the cubes so that the two lenses sit at the two ends and the two empty cubes in the middle.

    Step 2: Fix the cubes with base plates

    Fix the cubes with the base plates placing them on top and on the bottom.

    Step 3: Adjust the distance

    Adjust the distance between the lenses as shown in the image.

    Step 4: Use Kepler's telescope

Look for an object in the distance and use Kepler's telescope to look at it.

    What is a spotting scope?

The spotting scope is long, so the diagram is not to scale. Set the lenses in the correct positions as shown in the diagram and look into the distance through the telescope.


    How does the image here compare to the Kepler telescope?

    As you look through the telescope, adjust the distances between the components to see a sharp image!

    This is how the spotting scope works

The magnification is the same as that of the Kepler telescope. The erecting lens only changes the orientation (it flips the image once more), not the magnification.

    An upright image is necessary for terrestrial observations. True terrestrial telescopes use prism systems to rotate the image and keep it compact.

The image is
• Magnified at the same magnification as the Kepler telescope
• Upright
• Not mirrored

diff --git a/docs/Toolboxes/DiscoveryCore/ENGLISH/coreMicroscope/index.html b/docs/Toolboxes/DiscoveryCore/ENGLISH/coreMicroscope/index.html

    Materials needed:

    • Microscope objective 4x
    • Microscope objective long mount with gear
    • Ramsden-Eyepiece (in cube)
    • Two non-kinematic mirrors (in cubes)
    • Sample holder (in cube)
    • Three empty cubes
    • 11 base plates
    • Smartphone base plate
    • Torch lamp
    • 50 mm lens (in cube)

    Diagram (Side view):

    Instructions for assembling the Smartphone Microscope:

    Step 1: Build a four-base plate line

    Step 2: Assemble the components

Place the microscope objective mount at one end, followed by the two mirrors facing each other, and one empty cube at the other end. Fix them with base plates.

    Step 3: Adjust the objective

    Build one cube with the microscope objective inside. Adjust the objective's height if necessary by using the gear.

    Step 4: Place the eyepiece

    Place the eyepiece next to the microscope objective and one empty cube next to it. Mind the right orientation of the eyepiece.

    Step 5: Align the smartphone base

    Place the smartphone base with the hole aligned with the eyepiece. Note: You can adjust the orientation of the smartphone base to adapt your smartphone's size.

    Step 6: Set up the sample holder

    Place the sample holder cube on top of the microscope objective. Mind the distance between them. You can adjust the coarse distance by sliding the sample holder inside the cube and the finer distance by using the gear.

    Step 7: Add the converging lens and lamp

    Place a converging lens cube on top of the sample holder cube and place the torch lamp on top. Place the smartphone aligned to the eyepiece.

    Step 8: Adjust for clarity

    Try to move the smartphone such that the whole eyepiece circle appears illuminated. Then, turn the gear to focus and get a sharp image of the specimen.

    Better with smartphone or eye?

    The smartphone camera has a lens with a very short focal length because it has to fit into the thin smartphone. The lens then creates an image on the camera sensor whose properties are similar to those of the human eye.

The eye can see objects both far away and close up. This property is called accommodation.

    The smartphone camera can also do this, but it is called autofocus. It describes the ability to sharply image objects at different distances on the sensor.

    The image from the eyepiece comes in parallel rays, as if coming from infinity. You observed with a relaxed eye (looking into the distance) or with a camera focused at infinity.
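For reference, the standard relations used for such magnification calculations (added here as a reminder, not taken from this page):

M_total = M_objective × M_eyepiece, with M_eyepiece = 250 mm / f_eyepiece

With the 4× objective used here, an eyepiece with, for example, f_eyepiece = 30 mm (an assumed value, not specified on this page) would give M_total ≈ 4 × 250/30 ≈ 33×.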


    Calculation results

diff --git a/docs/Toolboxes/DiscoveryCore/ENGLISH/picturedTutorial/index.html b/docs/Toolboxes/DiscoveryCore/ENGLISH/picturedTutorial/index.html

    Step 6: Setup and alignment

Place the camera on the sample arm as shown. Put the screen at the other arm exit. Place the sample holder using one half of the cube at a time so that it does not collide with the microscope objective.

    Turn the laser on and use the screen to align both beams using the screws on the reference mirror.

    Step 7: Connect and adjust in the MVS app

    Connect the camera to the computer and open the MVS app. Block the reference beam. Move the coverslide such that your sample enters the FoV (Field of View). Unblock the reference beam. Zoom into the image to distinguish the fringe pattern in the MVS camera display. Adjust the angles of the reference mirror using the screws to change the fringe pattern as shown.
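A rule of thumb for this adjustment (standard two-beam interference, not specific to this camera or setup): two beams crossing at a small angle θ produce fringes with spacing Λ ≈ λ / θ. Tilting the reference mirror therefore makes the fringes finer and moves the first-order peak further away from the centre of the FFT, which is exactly what is being tuned here.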

Step 8: Data processing

Process the data. Phase unwrapping is possible.

    First Tests with Modifications to the Original Setup

Using Lei's code, the need for a linear stage for the sample was identified. Adjusting the objective and tube lens enhances the interference, so it is crucial to use the ImSwitch interface to see the FFT in real time and optimize. The final goal is to move the position of the first-order interference so that Lei's algorithm (or another phase unwrapping algorithm) can retrieve the phase. To achieve this, two images need to be acquired: a sample image and a background image (without a cover slide, or with a slide region containing no specimen).
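A minimal sketch of this processing step, assuming an off-axis geometry, NumPy arrays as input and scikit-image's phase unwrapper (the function names, the synthetic test data and the peak coordinates are illustrative assumptions, not the actual Lei/ImSwitch code):

```python
import numpy as np
from skimage.restoration import unwrap_phase  # pip install scikit-image

def first_order_field(hologram, peak, radius):
    """Isolate one diffraction order in the FFT and return the complex field."""
    F = np.fft.fftshift(np.fft.fft2(hologram))
    yy, xx = np.indices(F.shape)
    mask = (yy - peak[0]) ** 2 + (xx - peak[1]) ** 2 < radius ** 2
    # Move the selected order to the centre of the spectrum, then transform back
    F_sel = np.roll(F * mask,
                    (F.shape[0] // 2 - peak[0], F.shape[1] // 2 - peak[1]),
                    axis=(0, 1))
    return np.fft.ifft2(np.fft.ifftshift(F_sel))

# Synthetic stand-ins for the two recorded frames (replace with real camera images)
y, x = np.mgrid[0:256, 0:256]
carrier = 2 * np.pi * (0.12 * x + 0.05 * y)           # tilt between sample and reference beam
specimen = 6.0 * np.exp(-((x - 128) ** 2 + (y - 128) ** 2) / 1500.0)  # fake specimen phase (rad)
sample_img = 1 + np.cos(carrier + specimen)           # off-axis hologram with specimen
background_img = 1 + np.cos(carrier)                  # hologram of an empty region

peak = (141, 159)  # +1 order for this carrier: (128 + 0.05*256, 128 + 0.12*256)
field_s = first_order_field(sample_img, peak, radius=20)
field_b = first_order_field(background_img, peak, radius=20)

# Dividing out the background removes the carrier tilt and static aberrations
wrapped = np.angle(field_s * np.conj(field_b))
phase = unwrap_phase(wrapped)                         # 2-D phase unwrapping
print(round(phase.max() - phase.min(), 1))            # ~6 rad for the synthetic specimen above
```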

    Result of Phase Unwrapping

    Install MVS App for Camera Utilization

    Camera model: MV-CE060-10UC. Visit the HIKROBOTICS website and download the MVS software suitable for your computer. Below are steps exemplifying the software installation for Mac.

    Install the downloaded file.

    Open the MVS Software.

    You should see the following window.

    Connect the camera. Refresh the USB line to detect the camera.

    Select the make-link button on the detected camera.

    The following window should be displayed.

    Click on the play button in the actions bar of the camera.

    If properly connected, you should see a real-time image. Adjust the exposure if the image is overexposed.

    To adjust the exposure time, go to the Feature tree, select the Acquisition Control Category, and change the Exposure Auto option to Continuous.

    Now, a clear image with good contrast should be visible.

    To stop recording, click on the stop button in the camera's actions bar.

    To disconnect the camera, click on the break-link button next to the detected camera in the USB devices list.

diff --git a/docs/Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxEN/index.html b/docs/Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxEN/index.html
(Image: docs/01_Toolboxes/01_DiscoveryCore/IMAGES/MINIBOX/2.png)

    If you don't see a sharp image, move the inserts (e.g. lens) until you see it clearly. The green arrow in the picture shows you how to do this.

    Here you can find a small video that explains the core concept of the cube

    What do the symbols mean?

    Duration: 2

Experiment: If you see this block, there is something to experiment with! You can place a UC2 cube on this block.
Explanations: If you see this icon, there is something to learn!
Calculations: There is something to calculate here. Take a pen and paper and start puzzling.
Caution: Do not touch the glass surfaces with your fingers!
Cleaning the lenses: If you have already touched a lens, you can clean it with a glasses cloth.

    What can a lens do?

    Duration: 2

    Take one or more of the cubes that have a lens in them and look at the UC2 symbol shown here. Hold the cube in your hand and change the distance between the lens and the image.

diff --git a/docs/Toolboxes/DiscoveryCore/GERMAN/CoreIntro/index.html b/docs/Toolboxes/DiscoveryCore/GERMAN/CoreIntro/index.html
Microscope objective: a special lens system that magnifies an object

An overview of the contents of a box (in English) can be found here (as of Oct. 2022):

What is UC2?

The core element of the UC2 project is a simple cube. The cube consists of two halves and houses a slidable insert. The insert can hold various optical components (e.g. lenses, mirrors), so different functions can be realized with each cube.

Cube type 1: injection-moulded with plug connection

Baseplate

Baseplate

The cube can be mounted on a baseplate. The baseplate modules can be plugged together like a puzzle.

UC2 for printing yourself

Duration: 3

The UC2 cube can also be 3D printed. It looks exactly like the injection-moulded model, but here it consists of a cube lid and the cube body, which are held together with screws. The screws are perfect for placing the cube on the magnetic plate. By combining different cube modules, different optical setups can easily be assembled. A new function can be added with each cube. There are no limits to your creativity.

Cube type 2: 3D printed with magnetic connection

Cube

Baseplate with magnets

In the 3D-printed baseplate there are small ball magnets onto which the cubes are placed.

Do you want more cubes? Then you can build them yourself. You can find everything here

This is how the cubes fit together

Duration: 1

Make sure that the cubes sit correctly on the plate and are not tilted. Finally, it is important that the inserts sit in the right place.

If you don't see a sharp image, move the inserts (e.g. the lens) until you see it clearly. The green arrow in the picture shows you how to do this.

You can also find a short tutorial in this (English) video:

What do the symbols mean?

Duration: 2

Experiment: If you see this block, there is something to experiment with! You can place a UC2 cube on this block.
Explanations: If you see this icon, there is something to learn!
Calculations: There is something to calculate here. Take a pen and paper and start puzzling.
Caution: Do not touch the glass surfaces with your fingers!
Cleaning the lenses: If you have already touched a lens, you can clean it with a glasses cloth.

diff --git a/docs/Toolboxes/DiscoveryCore/GERMAN/CoreLinse/index.html b/docs/Toolboxes/DiscoveryCore/GERMAN/CoreLinse/index.html

Lens

What can a lens do?

Take one or more of the cubes with a lens inside and look at the UC2 symbol shown here through it. Hold the cube in your hand and change the distance between the lens and the image.

Lenses

In ray optics, light is represented as bundles of rays (arrows), which simplifies the physical properties of light. A ray has a direction and is therefore drawn as an arrow. A lens "refracts" the ray and thus changes its direction.

The focal length of a lens corresponds to the distance from the lens to the focal plane in which the focal point lies. It is given in millimeters (f = mm).

Converging lenses

Converging lenses refract the light rays traveling parallel to the optical axis into a point called the focal point.

Diverging lenses

Diverging lenses refract the light rays traveling parallel to the optical axis as if they originated from a point called the "virtual" focal point.

Lenses "refract" the light rays

You can find the focal length of the lens as a printed number on the lens holders. The MiniBOX/CoreBOX contains a 100 mm converging lens, two 40 mm converging lenses and a -50 mm diverging lens. The numbers indicate the focal length.

The converging lens is also called a positive or convex lens. The middle part of the lens is always thicker than the edge.

With the converging lens the image is magnified. The magnification differs between the 40 mm lens and the 100 mm lens. The image can be upright or inverted.

The diverging lens is sometimes also called a negative or concave lens. The middle part of the lens is always thinner than the edge.

With the diverging lens (here: the -50 mm lens) the image is always reduced and always upright.

We assume that our lenses are so-called "thin lenses". This means we can treat them as a single plane and ignore their thickness. This makes explanations and calculations much easier.

Did the answers raise even more questions? Then continue to find out exactly how lenses work...

Lens imaging

Now take the lens cubes. With the matching lens, try to decipher the focal length information in the cubes shown. Move the lens over the writing until it has the same size as the "UC2" text.

Can you see the text at the same size and orientation as the "UC2"? What happens when you change the distance between the lens and the image?

What happens if you use a lens with the wrong focal length?

Imaging an object through a positive lens

As an example we take the converging lens. We start with an object (green arrow) and look at what happens to the rays that start from its tip. There are infinitely many rays in all directions, but for drawing the image the following three rays are enough:

1. The central ray (orange) passes undisturbed through the center of the lens.
2. The focal ray (yellow) also starts from the tip of the arrow but passes through the object-side focal point at focal length f. After the lens it continues at the same height, but now parallel to the optical axis.
3. The parallel ray (red) initially runs parallel to the optical axis but is then refracted at the lens such that it passes through the image-side focal point at focal length f.

The image is formed where all the rays intersect. This principle applies to all points of an object and the rays emanating from them. Depending on which lens is used and on the position of the object, the properties of the image change: size, orientation and position.

Imaging an object through a negative lens

In the case of the diverging lens we use the same method to draw the ray path. Unlike with the converging lens, the image is always reduced and virtual. The magnification depends on the position of the object in front of the lens. Unlike with the converging lens, the image is formed on the object side and is therefore called a virtual image. You can see it directly with your eyes but cannot project it onto a screen.

The way a lens creates an image is predictable if you know the focal length of that lens. That is why a certain distance has to be maintained so that you can read the writing with the specified lens on the previous sheet.

The magnification and the location where the image is formed depend on the focal length of the lens and on the distance between the lens and the object.

With the diverging lens (f = -50 mm) you always see a reduced virtual image. A virtual image can only be viewed with the eye. So far we have only seen virtual images.

The converging lens as a magnifying glass

Take the UC2 lens cube with focal length f = 40 mm and use it as a magnifying glass.

Can you read the small letters through the converging lens? What is written there?

A converging lens in action (in English) is shown here:

This is what converging lenses do

With converging lenses the image and the magnification depend on the position of the object.

If the distance between the object and the lens is more than twice the focal length of the lens, then the image is...

• Inverted
• Side-reversed
• Reduced
• Real

If the distance between the object and the lens is exactly twice the focal length of the lens, then the image is...

• Inverted
• Side-reversed
• The same size
• Real

If the distance between the object and the lens is more than the focal length and less than twice the focal length of the lens, then the image is...

• Inverted
• Side-reversed
• Magnified
• Real

Object distance (g)

The distance between the object and the lens plane is denoted g.

Image distance (b)

The distance between the lens plane and the image formed by the lens is denoted b.

The converging lens can produce a real image. The real image can then be seen on a screen.

This is why the magnifying glass magnifies

Magnifying glass effect!

If the distance between the object and the lens is less than the focal length of the lens, then the image is...

• Upright
• Not side-reversed
• Magnified
• Virtual

The magnifying glass is the simplest of all optical devices, since it consists of nothing but a simple converging lens with a suitable focal length. Why does the cube with the 50 mm lens magnify the small text? If the object sits closer to the lens than its focal length (less than 50 mm in front of the lens), the lens creates a virtual image that lies behind the actual object. The eye perceives it as magnified. Have a look at the diagram above.

Calculate the magnification of the magnifying glass using the following formula:

250 mm is the distance of distinct vision, i.e. the distance between the object and the eye at which most people can read comfortably. More on this later under the "accommodation" of the eye.

How does a cinema projector work?

Take the UC2 lens cube with focal length f = 40 mm and place it behind the sample holder cube. The distance between the object and the lens (i.e. the object distance g) should be about 50 mm. If you now illuminate the object with the flashlight, you will see it in focus on the wall at a distance of about 200 mm. A cinema projector has a film strip instead of the object and, of course, a much stronger light source.

Use a flashlight (e.g. from your phone) as the light source and hold it in front of the object.

Use the picture or the text on the microscope slide as the object.

How is the image oriented? Slide the lens back and forth in the cube and observe when the image is in focus. Find the image for g = 50 mm, 60 mm, 65 mm and measure the distance between the lens and the image.

How does a cinema projector work?

Where is the image?

When an object is imaged through a converging lens, the position and size of the image depend on the distance (g) of the object from the lens and on its focal length (f). The lens equation describes the relationship between the image distance (b) and the object distance (g):

How big is the image?

The magnification of the object on the screen can easily be calculated with the following formula:

This is how the projector works

Check whether your observation agrees with the calculation

Calculate the magnification of the projector for the different values of g and b.

Our lens has a focal length of f = 40 mm.

For g = 50 mm → b = 200 mm

For g = 60 mm → b = 120 mm

For g = 65 mm → b = 104 mm

The projector always produces a magnified, inverted image. The position of the image and its magnification depend on the position and size of the object.

diff --git a/docs/Toolboxes/DiscoveryCore/GERMAN/CoreMikroskop/index.html b/docs/Toolboxes/DiscoveryCore/GERMAN/CoreMikroskop/index.html

Microscope

Light microscope with "infinity optics"

Duration: 2

What happens when you turn the Kepler telescope around?

Place the object about 40 mm in front of the objective and find the image about 100 mm behind the tube lens (using a piece of paper or the wall as a screen), as shown in the diagram. Move the lenses until you get a sharp image.

Place the object together with the objective as one unit on the paper. Put the tube lens at a distance of 100 mm from your screen (paper, wall). Change the distance between the lenses: does the image change?

This is what "infinity optics" means

Duration: 2

A microscope is a device that allows you to view or image objects at high magnification.

The image is called the intermediate image because it is usually magnified further with an eyepiece.

The object sits approximately in the object-side focal plane of the objective. Thus all incident rays are converted into a parallel ray bundle behind the objective. The objective has a short focal length.

The tube lens creates a real image by collecting the parallel rays that hit it in its focal plane. If we place it behind the objective, the object in the focal plane of the objective is imaged. The tube lens has a longer focal length than the objective.

The image in the plane of the intermediate image is inverted, side-reversed, magnified and real. The real image can be seen on a screen.

This is what "infinity optics" means

Duration: 2

What is the magnification of the image?

Magnification of the image

The lenses of the Kepler telescope can also be used for a microscope, but in a different order. As long as the object is in the focal plane of the objective and the screen is in the focal plane of the tube lens, the distance between objective and tube lens does not matter, because the light rays are parallel.

"Infinity optics" microscope with eyepiece

Duration: 2

Can you see the microscopic image through the eyepiece lens with your eye? What effect does the mirror have? Build the microscope without the mirror. Make sure you still have two empty spaces between the tube lens and the eyepiece. What do you observe then?

A short introduction to mirrors and what you can do with them can be found here:

What is the eyepiece good for?

Duration: 2

Newer microscopes are equipped with so-called "infinity optics". In this case the objective does not create a real intermediate image. The light leaves the objective as parallel rays, as if coming from infinity. At the end of the "infinite" tube there is a tube lens. This creates an intermediate image, which is then magnified again by the eyepiece.

The image behind the eyepiece is inverted, side-reversed, magnified and virtual. The virtual image can be seen with the eye.

This configuration is very useful in modern microscopes, since additional components such as filters can be placed between the objective and the tube lens without affecting the optical path.

A filter can be used to change the brightness and color of the image.

This is what the eyepiece is good for

Duration: 2

What is the magnification after the eyepiece?

Total magnification

An eyepiece is really just a lens that magnifies the intermediate image. It images the virtual image so that you can view it with your eye.

With the mirror you can not only see yourself but also reflect the incident light in any direction. This lets you fold the optical path and make working with the setup more convenient. The mirror has no influence on the magnification, but it flips the image in one direction.

Light microscope with "finite optics"

Duration: 2

Place the cubes in the positions shown in the diagram below and look through the eyepiece.

Build the microscope like a sandwich by adding a second layer using a baseplate. Look through the eyepiece from above.

Do you see the image through the eyepiece as before? Can you also find the real intermediate image with a piece of paper?

Turn the small gear of the objective holder. This moves, i.e. focuses, the objective. If it does not go any further, you can also slide the objective within the holder.

"Finite optics" versus "infinity optics"

Duration: 2

The objectives of older or smaller microscopes are often so-called finite objectives. They behave like a lens with an extremely short focal length and create an intermediate image behind the objective at an image distance defined by the tube length. The tube length is printed on the objective and is 160 mm for our objective. The real intermediate image is formed there and is then magnified by the eyepiece optics.

Microscopes can focus on the object either by moving the object or by moving the objective. Here we move the objective using a simple mechanism. The rotation of the gear is converted into a displacement of the objective lens. For larger movements you can additionally slide the objective in its rail.

"Finite optics" versus "infinity optics"

Duration: 2

What is the magnification of the intermediate image? And what is the magnification after the eyepiece?

Objective magnification

as printed on it

Eyepiece magnification

Total magnification

The image is larger than with the infinity microscope. The magnification of the objective here is 4×. If you calculated the magnification of the previous microscope, this will certainly not surprise you.

The intermediate image is now formed by the objective alone and lies 160 mm behind it. In the next step we will find out why.

Objective and eyepiece

Duration: 2

This is the objective

Duration: 2

An objective is an optical system that creates a magnified image of an object. The various numbers printed on the objective have different meanings:

The 4× objective has only a single lens inside. The objectives with higher magnification are complete lens systems.

The objective is also a converging lens with a short focal length. The 4× objective has a focal length of f = 32 mm. When used as a magnifying glass it has a higher magnification than the 40 mm lens. The field of view is sharp but small.

The eyepiece

Duration: 2

An eyepiece is really a magnifying glass, since it magnifies the intermediate image. The eyepiece we use here is a so-called Ramsden eyepiece. A single lens can also be used as an eyepiece. However, with a Ramsden eyepiece consisting of a lens system the field of view is better, because it produces fewer errors at the edge of the field of view. The Ramsden eyepiece consists of two lenses with the same focal length. Its focal length is f_Ramsden eyepiece = 3/4 f_lens

What is the magnification of the Ramsden eyepiece?

Every eyepiece has a so-called Ramsden disc, which is the smallest diameter of the light beam leaving the microscope through the eyepiece.

The field of view is larger and the image looks clearer with the Ramsden eyepiece.

Smartphone microscope

Duration: 2

Build the smartphone microscope as shown. Use any two cubes here to position the smartphone securely.

Build the microscope like a sandwich by adding a second layer using a baseplate. Look through the eyepiece from above.

Swap the Ramsden eyepiece for the 40 mm lens. Which works better for the eye and which for the smartphone?

Better with smartphone or eye?

Duration: 2

The smartphone camera has a lens with a very short focal length because it has to fit into the thin smartphone. The lens then creates an image on the camera sensor whose properties are similar to those of the human eye.

The eye can image objects both from far away and from close up. This property is called accommodation.

The smartphone camera can do this too, but there it is called autofocus. It describes the ability to image objects at different distances sharply on the sensor.

The image from the eyepiece comes as parallel rays, as if it came from infinity. You observed with a relaxed eye (looking into the distance) or with a camera focused at infinity.

Calculation results

Duration: 2

diff --git a/docs/Toolboxes/DiscoveryCore/GERMAN/CoreTeleskop/index.html b/docs/Toolboxes/DiscoveryCore/GERMAN/CoreTeleskop/index.html
Place the lenses in the correct positions as shown in the diagram and look into the distance through the telescope.

How does the image here compare to the Kepler telescope?

While looking through the telescope, adjust the distances between the components until you see a sharp image!

This is how the spotting scope works

The magnification is the same as that of the Kepler telescope. The erecting lens only changes the orientation (it flips the image once more), not the magnification.

An upright image is necessary for terrestrial observations. Real terrestrial telescopes use prism systems to rotate the image and keep the telescope compact.

The image is

• Magnified at the same magnification as the Kepler telescope
• Upright
• Not mirrored


diff --git a/docs/Toolboxes/DiscoveryCore/Opticsintro/index.html b/docs/Toolboxes/DiscoveryCore/Opticsintro/index.html

diff --git a/docs/Toolboxes/DiscoveryCore/SPANISH/core_intro/index.html b/docs/Toolboxes/DiscoveryCore/SPANISH/core_intro/index.html
Microscope objective: a special lens system that magnifies an object

A quick tour through an example box and what the cubes look like (as of October 2022) can be found here:

What is UC2?

The core element of the UC2 project is a simple cube. The cube consists of two halves and houses a slidable insert. The insert can hold various optical components (e.g. lenses, mirrors), which means that different functions can be implemented with each cube.

Cube type 1: injection-moulded with plug connection

Baseplate

Baseplate

The cube can be mounted on a baseplate. The baseplate modules can be put together like a puzzle.

UC2 for printing yourself

The UC2 cube can also be 3D printed. It looks the same as the injection-moulded model, but here it consists of a cube lid and the cube body, which are held together with screws. The screws are ideal for placing the cube on the magnetic plate. By combining different cube modules, different optical setups can easily be assembled. A new function can be added with each cube. There are no limits to your creativity.

Cube type 2: 3D printed with magnetic connection

Cube

Baseplate with magnets

In the 3D-printed baseplate there are small ball magnets onto which the cubes are placed.

Do you want more cubes? Then you can build them yourself. You can find everything here

This is how the cubes fit together

Duration: 1

Make sure that the cubes are placed correctly on the plate and are not tilted. Finally, it is important that the inserts are in the right place.

If you don't see a sharp image, move the inserts (e.g. the lens) until you see it clearly. The green arrow in the picture shows you how to do this.

Here you can find a short video that explains the core concept of the cube

What do the symbols mean?

Duration: 2

Experiment: If you see this block, there is something to experiment with! You can place a UC2 cube on this block.
Explanations: If you see this icon, there is something to learn!
Calculations: There is something to calculate here. Take a pen and paper and start puzzling.
Caution: Do not touch the glass surfaces with your fingers!
Cleaning the lenses: If you have already touched the lens, you can clean it with a glasses cloth.

What can a lens do?

Duration: 2

Take one or more of the cubes that have a lens and look at the UC2 symbol shown here. Hold the cube in your hand and change the distance between the lens and the image.

diff --git a/docs/Toolboxes/DiscoveryCore/Smartphone Microscope/index.html b/docs/Toolboxes/DiscoveryCore/Smartphone Microscope/index.html

    openUC2 Smartphone Microscope with a finite corrected objective lens

This video shows you how to build the UC2 smartphone microscope, as also shown in the PDF manual. It includes some tricks on how to make it more stable and how to operate the Z-stage.

diff --git a/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/index.html b/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/index.html

    The Course BOX Alignment Procedure (Finite Optics)

    This is the alignment procedure of the experiments with finite-corrected optics. If you are looking for the infinity-corrected setups click here.

    First experiment: Finite-corrected Microscope with Köhler Illumination

    This experiment demonstrates the essential parts of a microscope and explains the concept of conjugate planes. The key components are: Light source, Collector lens, Field Diaphragm, Aperture Diaphragm, Condenser lens, Sample, Objective lens and Eyepiece lens.

    The Aperture set of conjugate planes: Lamp filament, Aperture diaphragm, Back Focal Plane of the objective, Exit pupil of the eye.

    The Field set of conjugate planes: Field diaphragm, Sample plane, Primary Image Plane, Retina.

    Second experiment: Abbe Diffraction Experiment

The famous Abbe diffraction experiment shows how diffraction of light by a specimen (and interference with the illuminating light) creates an image, and how the collection of diffracted light defines the resolution of the microscope. With this setup it is possible to view both sets of conjugate planes at the same time, with one's eye or a camera.
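For reference, the standard Abbe result (quoted here, not derived on this page): the smallest resolvable spacing d is set by the wavelength λ and the numerical aperture NA of the objective,

d = λ / (2 · NA), with NA = n · sin(α)

so admitting light over a larger angle (more diffraction orders) improves the resolution.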

    The Aperture set of conjugate planes: Lamp filament, Aperture diaphragm, Back Focal Plane of the objective, Mirror surface in the side arm, Retina.

The Field set of conjugate planes: Field diaphragm, Sample plane, Primary Image Plane, Retina.

    We propose to use a diffraction grating as a sample and spatial filter in the BFP.
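With a grating of period d as the sample, the diffraction orders leave the specimen at angles given by the grating equation (a standard relation, added here for context):

d · sin(θ_m) = m · λ, with m = 0, ±1, ±2, ...

Only the orders that fall within the acceptance angle of the objective are collected; blocking or passing individual orders in the BFP with the spatial filter is what changes the image of the grating.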

    This tutorial will lead you step-by-step through the alignment of the Finite-corrected Microscope, Köhler Illumination and Abbe Diffraction Experiment.

    1. Start with 1×13 baseplate and all the cubes:
    • Flashlight Cube (1)
    • Collector Lens Cube (2)
    • 2× Circular Aperture Cube (3)
    • Condenser Lens Cube (4)
    • Sample Cube (5)
    • Objective Lens Cube (6)
    • Objective in Z-Stage Cube (7)
    • Eyepiece Lens Cube (8)
    • 4× Screen Cube - 1× with white paper, 3× with lens tissue (9)

    2. Start by placing the sample - we will build the microscope around it.

    3. Place the Primary Image Plane (PIP) by definition: the distance is 200 mm from sample to PIP when using the 4× objective (finite-corrected for 160 mm, 40 mm working distance). Use the Sample cube with white paper as a screen.

    1. Place the objective lens. It is a single plano-convex lens with f' = 35 mm.

2. Use direct illumination from the flashlight with its lens attached. Adjust the position of the objective lens: focus the image on the PIP by moving the lens back and forth.

• Focussing Trick: First move the whole objective lens cube in one direction (away from the sample). If the image sharpness in the PIP improves, slide the insert in that direction. If the image sharpness in the PIP gets worse, slide the insert in the opposite direction, towards the sample. Continue until you get a focussed image of your sample on the PIP.

    1. Place the eyepiece lens behind the PIP. It is a single plano-convex lens with f' = 40 mm. Exchange the PIP screen with a semitransparent screen (lens tissue). While looking through the eyepiece, focus it on the PIP. Use the Focussing Trick again. The position within the cube of the sample holder for the paper screen and for the semitransparent screen has to be identical.

    2. Take away the screen from PIP. To dim the flashlight, put a piece of lens tissue in front of it. Look through the eyepiece - you should see a sharp image of your sample.

3. Place the Field Diaphragm (FD). Its position was chosen so that it works well with the available lenses.

    1. Place the condenser lens. It is a single plano-convex lens with f' = 40 mm.

    2. Place the PIP screen back to its position.

    3. Place the flashlight on one end of the baseplate. Close the FD.

    4. Adjust the position of the condenser lens - focus the image of the FD on PIP by moving the lens back or forth (Focussing Trick). Once you see a sharp image of the closed FD on the screen in PIP, open and close the aperture and observe its effect.

5. Remove the screen, look through the eyepiece and check whether you see a sharp image of the closed FD.

    6. Place the Aperture Diaphragm (AD) into the Front Focal Plane (FFP) of the condenser lens (40 mm).

    • Focal Plane Trick - In case you don't know where exactly the FFP is, use this:

    • Use the Laser Cube with Beam Expander Cube to produce a collimated beam. Place the condenser lens in the collimated beam and find focus.

    • Place the AD into the same plane - slide it within the cube. Careful - push the aperture from one side to keep it together.

    1. Remove the lens of the flashlight. The position of the flashlight remains.

    1. Place the collector lens. It is a single plano-convex lens with f' = 50 mm.

    2. Remove the FD. Close the AD. Center the flashlight with respect to the AD. Focus the image of the LED on the AD by adjusting the position of the collector lens (Focussing Trick).

    3. Place the FD back in the illumination path. Now the Köhler illumination is properly aligned.

    4. Place a semitransparent screen into the Back Focal Plane (BFP) of the objective lens. Close the AD and check that you see a focussed image of the AD in the BFP.

    5. Remove all screens. Use a lens tissue to dim the light and look through the eyepiece. Observe the effect of opening and closing the apertures.

    • Left: both apertures open. Middle: FD closed. Right: AD closed.

    1. Exchange the objective lens with the 4× objective in the Z-Stage. Place a screen in the PIP and observe the effect of opening and closing the apertures in the PIP.

    • Top: both apertures open. Middle: AD closed. Bottom: FD closed.

    1. Remove the screen. This is an aligned finite-corrected microscope with Köhler illumination.

    1. By adding 4 more cubes and another baseplate, we will now create the Abbe Diffraction Experiment. The extra cubes are:
    • Beamsplitter Cube (1)
    • Mirror 45° Cube (2)
    • Eyepiece Lens Cube (different f' than the previously used one!) (3)
    • Relay Lens Cube (4)

    24. Remove the eyepiece (40 mm) and add the 4×2 baseplate. Exchange the 4× objective with the objective lens (35 mm).

    25. Place the Beamsplitter Cube.

    1. Place the screen into the PIP, in front of the Beamsplitter. Place a new eyepiece lens behind the Beamsplitter. It is a single plano-convex lens with f' = 100 mm. While looking through the eyepiece, focus it on the PIP (Focussing Trick).

    2. In the other arm we will observe the BFP. There are two options, depending on how you place the Mirror in the next step.
    • Option 1: Place the mirror as shown in the picture.
    • Place the eyepiece lens (40 mm).

    • Option 2: Place the mirror as shown in the picture.
    • Place the baseplate connector on the mirror cube.
    • Place the eyepiece cube (40 mm) on the connector on the mirror cube.

    1. Place the relay lens after the Beamsplitter. It is a single plano-convex lens with f' = 75 mm.

    2. Place a semitransparent screen in the BFP of the objective lens. Close AD and look through the eyepiece of the side arm. You should see a sharp image of the AD on the BFP.

    3. This is the Abbe Diffraction Experiment. Through the eyepieces you can see both sets of conjugate planes at the same time.

    Participate

    If you have a cool idea, please don't hesitate to write us a line, we are happy to incorporate it in our design to make it even better.

    We use a laser pointer as a light source and expand it using two lenses. The imaging path is the same as in the microscope in the first experiment and therefore we can observe the image of our sample in the main arm and the image of the BFP in the side arm.

    1. We use a very fine fish net as a sample here. You could try a net like this one. Another idea is to try one of these plastic tea bags. Or a diffraction grating.

    2. We provide a circular aperture and a rectangular aperture to be used in the BFP. We also suggest using an opaque dot (a small dot made with some marker or paint on a microscope slide) and a thin line object (like this tiny hex key here). The apertures block the light from the outside while the dot and line can block the center of the light path.

    3. As mentioned earlier, we keep the imaging path in both main arm and side arm. Remove the illumination part of the microscope and also the Eyepiece.

    4. Besides the apertures that we already prepared, we will need:

    • Laser Cube with laser pointer (1)
    • 1× Lens Cube with 50 mm lens (2)

    For now, place the camera in PIP.

    1. The laser is equipped with a cap that holds a lens from the RasPi camera. Make sure to put it on, otherwise you won't be able to create an expanded parallel beam.

    2. Place the Laser cube on the baseplate as shown in the picture.

    Careful! Do not hit anybody's eyes with the laser beam. Keep the laser off if you're not using it at the moment. Always point the laser away from people. Block the light if it's leaving the table you're working on.

    1. Place the lens for beam expansion behind the Laser cube as shown in the picture. It is a single plano-convex lens with f' = 50 mm. Align the lens to illuminate your Sample with a collimated beam - the diameter of the beam should be the same just after the lens cube and also far away from it. When your beam is well-collimated, the distance between the laser+lens duo and the Sample doesn't matter.

    2. You can switch the camera between the PIP and the BFP. You could also use two cameras, one in PIP and one in BFP, if you have them.

    3. Between the objective and the Beamsplitter is the Back Focal Plane of the Objective. You can see it if you put a piece of paper there - you will see the Fourier transform of the sample. You should see the same on camera in the side arm.

    4. In the PIP, you can see an image of the sample. Here we see our fish net. Align the camera to obtain a sharp image.
      ⭐ Because of the Talbot effect you can find more than one sharp image of the sample. Therefore, partially close the Field diaphragm (FD) and find the position of the camera where you not only see a sharp image of the grating (fish net) but also of the FD.

    1. In the BFP image in the side arm, you can see the Fourier transform of the grating just as it looks in the BFP itself. Align the second Relay lens to obtain a focussed image on the camera.
      ⭐ The grating is regular in both X and Y and therefore it's a very convenient sample for this experiment, because its Fourier transform is easily predictable. With a different sample the BFP will of course also look different.
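    If you want to predict where the diffraction orders of your grating land in the BFP, the grating equation is all you need. The numbers below (wavelength, grating period, focal length of the lens used as objective) are assumptions for illustration only; replace them with the values of your own laser, sample and lens.

    ```python
    import numpy as np

    wavelength = 650e-9   # assumed: red laser pointer, in m
    period     = 100e-6   # assumed: period of the fish net / grating, in m
    f_obj      = 50e-3    # assumed: focal length of the lens used as objective, in m

    orders = np.arange(-3, 4)                             # diffraction orders m = -3 ... +3
    sin_theta = orders * wavelength / period              # grating equation: sin(theta_m) = m * lambda / d
    valid = np.abs(sin_theta) <= 1                        # only these orders propagate
    x_bfp = f_obj * np.tan(np.arcsin(sin_theta[valid]))   # lateral position in the BFP

    for m, x in zip(orders[valid], x_bfp):
        print(f"order {int(m):+d}: {x * 1e3:6.2f} mm from the optical axis")
    ```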

    ⭐ Back Focal Plane

    The intensity peaks in the BFP are the diffraction orders of our sample. By placing an aperture or another object here we’ll be able to modify the information transmitted through the microscope that contributes to the image. Depending on the aperture we can observe different effects.

    • Circular aperture: The circular aperture blocks the light symmetrically from outside towards the center. Close the aperture and align the laser such that the 0th order is in the center of the aperture. You can align the laser using the four screws in its holder.

    • Rectangular aperture: The rectangular aperture closes independently from both sides in X and Y direction (horizontally and vertically). Use a hex key or a similar tool to close/open the aperture doors.

    • Dot and line: Use a sample holder cube or your (presumably steady) hand to hold these two. You can block the 0th or 0th+1st orders with the dot, depending on how big it is. You can block the X-0th or Y-0th order with the line-object.

    1. This is the setup for the second experiment: Abbe Diffraction Experiment.

    ⭐ Abbe Diffraction experiment - What do we see?

    1. With no aperture in the BFP, we see the image of the Sample in the PIP and the Fourier transform of the sample in the BFP, as we just aligned it and prepared it.

    2. Firstly we use the Circular aperture. As we slowly close it and change the diameter of the transmitting area, we cut out the higher diffraction orders that carry the high frequency information, hence the fine details. In the image plane we see how these details blur and the sharp edges soften. The more orders we cut out, the blurrier the image gets.

    3. Using the Rectangular aperture, we can block the diffraction orders more selectively. When we close the aperture in the X direction to only let through the Y-0th orders, the square pattern of the image disappears, and we have only lines. This is because there is no X order that would transmit the information about the shape in the perpendicular direction.

    4. When we do the same trick in the other direction, we then see lines of the other orientation but again no square pattern.

    5. Closing the aperture in both X and Y direction, we eventually block all the higher orders that form the image of the sample. As we can see here, when only the 0th order is transmitted all image information is lost. What we see is only some background noise.

    6. On the other hand, when we block only the 0th order but keep all the others (we do this using the dot on a slide), we are still able to see that the pattern is preserved, because all the orders still have a corresponding partner to interfere with on the other side of the 0th order. But now we are in a so-called dark field imaging mode. We'll explain it in the next steps.

    7. We can even block the 0th and 1st order by simply using a bigger dot in the BFP. We are still able to recognize the square pattern but the high frequency information, the noise, is taking over the image.

    8. When using the line object instead of the dot, we can block the 0th order completely in the Y direction and see what it does to the image. We still see the square pattern but suddenly, in the X direction, it seems that we have twice as many squares. This is the dark field imaging effect but in X only. We’re seeing just the edges and because there are two edges per square in one direction, it appears that we see them twice (a small numerical sketch of this frequency doubling follows after this list).

    9. The same works also in the perpendicular direction - blocking the 0th order in X results in the dark field imaging mode in Y.

    10. Using the rectangular aperture again, we can find out the minimal number of orders that we need to form a reliable image. We said that they always interfere with the 0th order, so we don't need both sides. Therefore, we close the aperture and let through only one quarter of the orders. We can block the higher orders as well, as they only carry the high-frequency information, and we are still able to see the basic pattern of our sample.
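    The frequency doubling described in the list above can also be reproduced numerically. The sketch below is a simplified 1D model (a smoothed binary grating standing in for the fish net): blocking the 0th order corresponds to subtracting the mean of the transmitted amplitude before taking the intensity.

    ```python
    import numpy as np

    x = np.linspace(0, 4, 2000, endpoint=False)          # four grating periods
    grating = (np.mod(x, 1.0) < 0.5).astype(float)       # binary grating, 50% duty cycle

    # soften the edges a little, mimicking the finite resolution of the microscope
    kernel = np.exp(-np.linspace(-3, 3, 61) ** 2)
    kernel /= kernel.sum()
    amplitude = np.convolve(grating, kernel, mode="same")

    bright_field = np.abs(amplitude) ** 2                      # all orders transmitted
    dark_field   = np.abs(amplitude - amplitude.mean()) ** 2   # 0th order blocked
    ```

    Plotting dark_field next to bright_field shows intensity maxima at the centre of every bar and every gap, i.e. twice per grating period, which is exactly the doubled pattern seen through the eyepiece.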

    ⭐ Watch the video of this experiment!

    UC2 YouSeeToo - Abbe Experiment Demonstration

    Notes to the video:

    • In this demonstration of the experiment, two Alvium cameras from Allied Vision are used, so we can show the PIP and BFP on the screen simultaneously
      • Find the cubes for the Alvium cameras here and choose the adjustable insert for easy alignment.
    • The optical path is different from the one described in this tutorial. This is because of the use of the above-mentioned cameras
      • The objective and eyepiece are both lenses with f' = 100 mm. The magnification of the microscope is therefore equal to 1. The "magnified" image is just a zoom into the camera view.
      • Thanks to the use of a 100 mm lens as an objective, the diffraction orders in the BFP are more separated and easily accessible.
      • In the side arm, the first lens has f' = 100 mm and the second lens f' = 50 mm. The image of the BFP is therefore demagnified twice, to fit better in the field of view of the camera.

    Bonus question: This magical image was taken by the RasPi camera in the BFP with the fish net as a sample. If you tell me what created this effect, I'll send you a chocolate ;-)

    Participate

    If you have a cool idea, please don't hesitate to write us a line, we are happy to incorporate it in our design to make it even better.



    MicroscopyCore

    The first version of this BOX was designed for the "Principles of Light Microscopy" Course of the Light Microscopy Facility of MPI-CBG in Dresden. The alignment procedure was developed and optimised by Sebastian Bundschuh. It follows the lectures of Peter Evennett that can be found here.

    The CourseBOX teaches the core principles of microscopy and the basics of optical alignment. It is intended for microscopy courses aimed at students who are users rather than designers of microscopes. This BOX provides a hands-on experience with insight into the black box that a microscope often seems to be. It comes with alignment tutorials and relies on basic components. By reusing the components and starting from common ground, it shows that all the microscopy methods are based on only a few principles.

    It is not yet a comprehensive and optimised toolbox but rather a collection of modules and experiments that are frequently taught in microscopy courses. There is still room for improvement and we're hoping that more universities and institutions adopt the CourseBOX in their courses, which will lead to its improvement and (if successful) production.

    Build the BOX

    A list of 3D-printed parts and necessary components is found in BUILD_ME, together with assembly guidelines and some printing tips and tricks.

    Setups

    What can you build with the CourseBOX?

    Compound microscope with proper Köhler illumination (finite optics)

    A finite corrected microscope with proper Köhler illumination. All conjugate planes are accessible.

    LINK for the detailed alignment procedure with image tutorial.

    Abbe Diffraction Experiment (finite optics)

    A classical experiment for explaining the Fourier transform performed by a lens. The illumination stays the same as in the previous experiment, but a beamsplitter and a relay lens are added for simultaneous observation of the Primary Image Plane and the Back Focal Plane.

    LINK for the detailed alignment procedure with image tutorial.

    Compound microscope with proper Köhler illumination (infinity optics)

    An infinity corrected microscope with proper Köhler illumination. All conjugate planes are accessible.

    LINK for the detailed alignment procedure with image tutorial.

    Abbe Diffraction Experiment (infinity optics)

    A classical experiment for explaining the Fourier transform performed by a lens. The setup of the previous experiment is reused and a laser pointer is added as a light source.

    LINK for the detailed alignment procedure with image tutorial.

    Coming soon:

    Laser Scanning Confocal Microscope

    The laser scanning system is built on the detection side of the same setup. The scanning mirror can be rotated around one axis, which results in a translation of the illuminated point on the sample.

    Light Sheet Microscope

    The principle of Selective Plane Illumination Microscopy is demonstrated with white light. The illumination path stays the same, only the collector lens is exchanged for a cylindrical one. The detection path is rotated by 90°.

    Participate

    If you have a cool idea, please don't hesitate to write us a line, we are happy to incorporate it in our design to make it even better.


    Building The CourseBOX

    This is a guide for building the CourseBOX. If you were looking for another BOX version, click here.

    This guide currently describes how to build both the finite-corrected version and the infinity version of the CourseBOX.

    Content of each section

    1. Shopping
    2. 3D printing
    3. Tools
    4. Assembly

    Infinity Optics

    • Total price: 300 € without a Raspberry Pi (190 €) (assuming only material costs and printing everything yourself)
    • Printing time including preparation: 4 days
    • Assembly time: 1 day

    Shopping

    What to buy

    • Check out the RESOURCES for more information!
    Link - name of part | Amount | Comment | Price
    3D printing material | ~580 g | Choose material that works with your 3D printer. If unsure, have a look at the guide in 3D printing section | 15 €
    Lens 50 mm | 5 pieces | | á 21 €
    Lens 100 mm | 3 pieces | Artikel 2004 | á 6 €
    Lens for Eye Cube | 1 piece | 551.OAL | á 5 €
    Beamsplitter | 1 piece | Artikel 2137 | á 27 €
    Flashlight | 1 piece | Light source for microscope. | á 7 €
    Laser Pointer | 1 piece | Light source for the Abbe experiment. With this very one you don't even need the flashlight. More information below. | á 5 €
    Magnets | 64 pieces | When using 3DP Cubes and baseplates. Ball magnets, diameter 5 mm. | total 20 €
    Screws | 7 pieces | M2×16 - 6 pieces; M2 nut - 6 pieces; M3×18 - 4 pieces | total ~2 €
    Screws | 112 pieces | For 3DP Cubes extra: (Art.-Nr. 00843 12) M3×12, galvanized steel - 64 pieces; (Art.-Nr. 00843 8) M3×8, galvanized steel - 64 pieces (or 128 pieces of M3×12) - BUT for this setup it isn't necessary to have screws on both sides; (Art.-Nr. 025505 8) M5×8, galvanized steel - 96 pieces (half if one-sided) | total ~4 €
    Raspberry Pi with accessories | | Have a look in our Bill-of-Materials for a complete list and links. | 190 €
    Chocolate | 1 bar | Use it as a reward when you're done. |

    2 in 1 light source

    We propose the use of this Laser Pointer, because it also has a white LED. But we need to smartly adapt this for our experiments.

    • For beam expansion of the laser light, print a laser cap from the STL folder and put the RasPi lens that you removed from the camera into it
    • The LED here has a lens which cannot be removed. To be able to find a focussed image of your light source, take a thin permanent marker and make a cross on the surface of this lens. Now you'll be able to see a focussed image of the cross in the Aperture set of conjugate planes.

    3D Printing:

    Parts

    To acquire the STL-files use the UC2-Configurator. The files themselves are in the RAW folder. The BOXes can be built using injection-moulded (IM) or 3D-printed (3DP) cubes.

    Note on the lens holders: If you use some other lens, you can generate a holder for it using our openSCAD design. Go to the Thingiverse page of this lens holder and use their in-built customizer to change the parameters of the insert.

    Completely new to 3D printing? Have a look into this beginner's guide!

    Our quick printing tutorial can be found here: UC2 YouSeeToo - How to print the base-cube?

    We have had good experience with this printer and these settings:

    • Prusa i3/MK3S
      • Prusament PLA 1,75 mm, for one Box: 0,58 kg = 195 m = 90 hours = 15 €
      • Profile Optimal 0,15 mm, infill 20%, no support, 215/60°C

    Which tools to use

    Tool | Comment
    Electric screw driver with 2,5 mm hex bit | For putting the cubes together using M3×12 and M3×8 screws.
    2,5 mm hex key | For fine adjustment of all the M3 screws if needed.
    Needle-nose Pliers | Might come in handy

    Assembly

    Part - link | Result
    Baseplates | 16× baseplate puzzle
    Lens Cubes | 8× Lens Cube: 5× Lens Cube with 50 mm lens; 3× Lens Cube with 100 mm lens.
    Sample Cubes | 2× Sample Holder Cube
    Flashlight Cube | 1× Flashlight Cube
    Circular Aperture Cube | 2× Circular Aperture Cube
    Rectangular Aperture Cube | 1× Rectangular Aperture Cube
    Beamsplitter Cube | 1× Beam Expander Cube
    RasPi Camera Cube | 1× Camera Cube with Raspberry Pi camera with the lens removed
    Laser Cube | 1× Laser Holder Cube and Laser Clamp with a cap for RasPi lens
    Eye Cube | 1× Eyeball Cube

    Software

    Prepare the Raspberry Pi following our tutorial in UC2-Software-GIT!

    Done! Great job!


    Finite-corrected Optics

    • Printing time including preparation: 5 days
    • Assembly time: 1 day

    Shopping

    What to buy

    • Check out the RESOURCES for more information!
    Link - name of part | Amount | Comment | Price per amount used
    3D printing material | ~620 g | Choose material that works with your 3D printer. If unsure, have a look at the guide in 3D printing section | 20 €
    Microscope objective 4× | 1 piece | | 10 €
    Lens 35 mm | 1 piece | We did the alignment with lenses of these focal lengths, but other combinations are also possible. The alignment principle stays the same, but the positions of the elements will be different. | 22 €
    Lens 40 mm | 2 pieces | | 44 €
    Lens 50 mm | 1 piece | | 21 €
    Lens 75 mm | 1 piece | | 20 €
    Lens 100 mm | 1 piece | | 20 €
    Flashlight | 1 piece | Light source for the projector and microscope. | 7 €
    Magnets | 128 pieces | Ball magnets, diameter 5 mm. | 30 €
    Screws | ~120 pieces | M3×12, galvanized steel - ~90 pieces; M3×8, galvanized steel - ~90 pieces; M3×18, galvanized steel - 2 pieces; M3×30, not magnetic - 1 piece; M3 nut | ~15 €
    Chocolate | 1 bar | Use it as a reward when you're done. |

    3D Printing:

    Completely new to 3D printing? Have a look into this beginner's guide!

    Our quick printing tutorial can be found here: UC2 YouSeeToo - How to print the base-cube?

    We have had good experience with this printer and these settings:

    • Prusa i3/MK3S
      • PLA 1,75 mm, for one Box: 0,6 kg = 235 m = 85 hours = 20 €
      • Profile Optimal 0,15 mm, infill 20%, no support, 215/60°C

    Note: The design of the mechanical Z-stage has recently been changed. The files here are not yet up-to-date. Please check the Mechanical Z-stage for the latest version. The same applies to the Lens Holder available here.

    Housing

    Name of part - Link to STL file | Amount
    (01) Basic Cube 2×1 | 1 piece
    (02) Basic Lid 2×1 | 1 piece
    (03) Basic Cube 1×1 | 20 pieces
    (04) Basic Lid 1×1 | 20 pieces
    (05) Baseplate 4×1 | 4 pieces
    (06) Baseplate 4×2 | 1 piece
    (07) Baseplate 1×1 | 1 piece
    (08) Baseplate Connector 1×1 | 1 piece

    Inserts

    Name of part - Link to STL file | Amount | Comment
    (09) Z-Stage Focusing Insert | 1 piece | Rotate the part in your slicer before printing. Always print it lying on the flat side.
    (10) Z-Stage Objective Mount | 1 piece | For mounting the objective lens (RMS thread).
    (11a) Z-Stage Bottom Plate | 1 piece | The plate holds the gear and screw in position, allowing them only to rotate but not to wobble.
    (11b) Z-Stage Top Plate | 1 piece | The plate holds the gear and screw in position, allowing them only to rotate but not to wobble.
    (12) Z-Stage Gear | 1 piece | Kindly borrowed from openflexure.
    (13) Lens Holder | 6 pieces | Diameter fits the listed lenses (25 mm).
    (14) Lens Holder Clamp | 6 pieces | Diameter fits the listed lenses (25 mm).
    (15) Cylindrical Lens Holder | 1 piece | Diameter fits the listed lenses (25 mm).
    (16) Generic Sample Holder | 5 pieces | In the SimpleBOX, it is used to hold the object in the projector setup.
    (17) Generic Sample Holder Clamp | 5 pieces | To fix the sample.
    (18) Mirror Holder 45° 30×30 mm² | 1 piece | Size fits the listed mirrors.
    (19) Flashlight Holder | 2 pieces |
    (20) Circular Aperture Guide | 2 pieces |
    (21) Circular Aperture Wheel | 2 pieces |
    (22) Circular Aperture Lid | 2 pieces |
    (23) Circular Aperture Leaf | 14 pieces |
    (24) Laser Holder | 2 pieces |
    (25) Laser Clamp | 1 piece |
    (26) Beam Expander Insert | 1 piece |
    (27) Beam Expander Lens Adapter | 1 piece |
    (28) Beamsplitter Insert | 1 piece |

    Which tools to use

    Tool | Comment
    Electric screw driver with 2,5 mm hex bit | For putting the cubes together using M3×12 and M3×8 screws.
    2,5 mm hex key | For fine adjustment of all the M3 screws if needed.
    1,5 mm hex key | For mounting worm screws.
    Needle-nose Pliers | Might come in handy

    Assembly

    Part - link | Result
    Baseplates | 1× "big" baseplate (4×2), 4× "small" baseplate (4×1), 1× "unit" baseplate (1×1), 1× "unit" baseplate connector (1×1)
    Z-Stage Cube | 1× mechanical Z-Stage, Sample Clamp not necessary
    Lens Cubes | 6× Lens Cube; Write the focal lengths of the lenses on the holders, so you can always easily find the right one when building the setups.
    Cylindrical Lens Cube | 1× Cylindrical Lens Cube
    Sample Cubes | 5× Sample Holder Cube
    Mirror Cube | 1× Mirror Cube
    Flashlight Cube | 1× Flashlight Cube
    Circular Aperture Cube | 2× Circular Aperture Cube
    Laser Cube | 1× Laser Holder Cube and Laser Clamp
    Beam Expander Cube | 1× Beam Expander Cube
    Beamsplitter Cube | 1× Beam Expander Cube

    Done! Great job!


    openUC2 Camera Setup

    The openUC2 Camera Setup provides guidance on configuring and using webcams and Daheng cameras for imaging purposes within the openUC2 ecosystem. This setup allows users to seamlessly integrate cameras into their experimental setups and utilize them for imaging and data acquisition. Below are detailed instructions for setting up cameras on different platforms:

    Webcam

    On Windows

    • Users can utilize the built-in webcam functionality provided by Windows. They need to open the webcam using the Windows internal software and start streaming.

    On Mac

    • For Mac users, the Photo Booth application can be used to access the webcam. Simply open the Photo Booth application and select the camera to start capturing images or videos.

    Alternative Method

    • Users can also use the openUC2 Web Serial interface available at https://youseetoo.github.io/indexWebSerialTest.html to open the camera stream.
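    If you prefer to grab the webcam stream programmatically (for example to save frames for later analysis), a common approach is OpenCV. This is a generic sketch, not part of the official openUC2 tools; the device index 0 is an assumption and may differ on your machine.

    ```python
    import cv2

    cap = cv2.VideoCapture(0)          # 0 = first webcam found by the operating system
    if not cap.isOpened():
        raise RuntimeError("No webcam found")

    while True:
        ok, frame = cap.read()         # grab one BGR frame
        if not ok:
            break
        cv2.imshow("openUC2 webcam", frame)
        if cv2.waitKey(1) & 0xFF == ord("q"):   # press 'q' to quit
            break

    cap.release()
    cv2.destroyAllWindows()
    ```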

    Daheng Cameras

    On Windows

    • To use Daheng cameras on Windows, users should visit https://www.get-cameras.com/customerdownloads?submissionGuid=93704570-544a-43e8-83d6-f5f3cf0b97fb.
    • From the provided options, select the "Windows SDK USB2+USB3+GigE (including Directshow + Python) Galaxy V1.23.2305.9161" package.
    • Install the software and drivers from the downloaded package.
    • Once installed, users can start the "Galaxy Viewer" application to begin capturing images using the Daheng camera (for scripted capture, see the Python sketch below).
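    The same SDK package also installs the Galaxy Python bindings (gxipy). The snippet below is a rough sketch based on the vendor's standard single-frame example and is not official openUC2 code; the camera index and the colour conversion are assumptions that depend on your specific camera model.

    ```python
    import gxipy as gx   # Galaxy Python bindings shipped with the Daheng SDK

    device_manager = gx.DeviceManager()
    dev_num, dev_info_list = device_manager.update_device_list()
    if dev_num == 0:
        raise RuntimeError("No Daheng camera found")

    cam = device_manager.open_device_by_index(1)   # 1-based index of the first camera
    cam.stream_on()

    raw_image = cam.data_stream[0].get_image()     # grab a single frame
    rgb_image = raw_image.convert("RGB")           # demosaic the raw Bayer data (colour cameras)
    frame = rgb_image.get_numpy_array()            # numpy array of shape (height, width, 3)
    print("captured frame with shape", frame.shape)

    cam.stream_off()
    cam.close_device()
    ```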

    On Android Phones

    • To use Daheng cameras on Android phones, users should first visit https://www.get-cameras.com/customerdownloads?submissionGuid=93704570-544a-43e8-83d6-f5f3cf0b97fb.
    • From the provided options, select the "Android USB3 SDK v1.2.2112.9201" package and download it.
    • After downloading, unzip the package and install the "GxViewer_GetRawImage.apk" on the Android phone (users may need to allow installation of apps from unknown sources or 3rd party apps).
    • Connect the Daheng camera to the Android phone using a USB-C to Daheng cable (adapter).
    • Open the installed app ("GxViewer_GetRawImage") and grant access to the USB connection when prompted.
    • Users can adjust camera settings by swiping left in the app and then proceed to capture images.

    Video Tutorial

    A video tutorial demonstrating the camera setup is available at https://youtu.be/PtdU5qE6BSc.

    The openUC2 Camera Setup provides users with easy-to-follow instructions for configuring and utilizing webcams and Daheng cameras on different platforms, enabling seamless integration into various imaging applications and experiments.

    XIAO Sense Camera

    Coming Soon.

    You can have a glimpse here https://github.com/openUC2/openUC2-SEEED-XIAO-Camera/

    Fully assembled XYZ stage with high precision stepper motors, designed for seamless automation in microscopy setups.

    Key Features:

    • Mounting Flexibility: The XYZ stage can be easily mounted on top of a cube or suspended at the side, offering flexibility in integrating it into various experimental setups.

    • Interferometer and Microscopy Applications: This stage finds application in interferometers and light-sheet/fluorescence microscopes, where it plays a crucial role in precisely manipulating the sample in all directions.

    • Durable Construction: Constructed entirely from metal, the XYZ stage ensures robustness and stability during delicate experiments.

    • High Precision Stepper Motors: The stage is equipped with non-captive stepper motors, delivering exceptional precision during positioning operations.

    XYZ Stage in an Interferometer Setup Image showing two XYZ stages (one motorized and one manual stage) employed in an OCT / Michelson type interferometer.

    Setup and Integration: To assist users in setting up and integrating the XYZ stage into their experimental configurations, a comprehensive video guide is available. This instructional video can be viewed at https://www.youtube.com/embed/E_hhclFqx5g.

    For further information or inquiries regarding the openUC2 XYZ Micrometer Stage, interested parties can refer to the official openOCT project page at https://github.com/openUC2/openUC2-Hackathon-openOCTRemote. The project page contains additional details, resources, and support for utilizing the XYZ stage effectively in diverse research settings.


    Focus the sample with the manual focusing stage.

    Using an Android APP

    For Android users, please have a look here: https://matchboxscope.github.io/docs/APP

    This app will help you connect and capture images using this microscope.

    Conclusion

    Congratulations! You have successfully assembled your modular microscope. With this microscope, you can now observe various samples and capture images using the camera connected to your smartphone or computer. This modular design allows for easy customization and experimentation, making it a versatile tool for exploring the microscopic world.

    Remember, the performance of the microscope might be affected by the modifications made to the objective lens, so adjust your expectations accordingly. Enjoy exploring and discovering the hidden wonders of the microcosmos!

    8. Example Plot of a spectrum (white light)

    Operating the Spectrometer

    • Calibration: Essential for accurate measurements; map pixel columns to wavelengths using at least two known spectral lines.
    • Usage: Can be used to analyze spectra from various light sources (a small processing sketch follows below).
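    As a rough illustration of what that processing can look like, the sketch below collapses a camera frame into a spectrum and applies a two-point wavelength calibration. It is a generic example, not the PySpectrometer code; the reference pixels and wavelengths are made-up placeholders you must replace with spectral lines you actually identified (for example from a fluorescent lamp).

    ```python
    import numpy as np

    def pixels_to_spectrum(gray_image, px_refs=(250, 610), wl_refs=(435.8, 611.6)):
        """Collapse a spectrometer camera frame into (wavelength, intensity).

        px_refs / wl_refs: two known spectral lines (pixel column, wavelength in nm)
        for a linear two-point calibration - replace them with your own measured values.
        """
        intensity = gray_image.sum(axis=0).astype(float)   # sum along the slit direction
        intensity -= intensity.min()                       # crude background removal

        slope = (wl_refs[1] - wl_refs[0]) / (px_refs[1] - px_refs[0])
        wavelengths = wl_refs[0] + slope * (np.arange(intensity.size) - px_refs[0])
        return wavelengths, intensity

    # usage with a placeholder frame - replace with an image captured from your camera
    frame = np.random.randint(0, 255, (480, 640), dtype=np.uint8)
    wl, spectrum = pixels_to_spectrum(frame)
    ```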

    9. Example Plot of a spectrum (red LED)

    10. Example Plot of a spectrum (green LED)

    Further Reading and Resources

    For more in-depth information on spectroscopy and related subjects, refer to resources provided by Public Lab and other scientific publications.

    Public Lab: https://publiclab.org/wiki/spectrometry

    Gaudi Lab: https://www.gaudi.ch/GaudiLabs/?page_id=825

    ESPectrometer: https://matchboxscope.github.io/docs/Variants/ESPectrometer

    Youtube: https://www.youtube.com/watch?app=desktop&v=T_goVwwxKE4&ab_channel=Les%27Lab

    Software: https://github.com/leswright1977/PySpectrometer

    Contributing and Collaboration

    This open-source project welcomes contributions from everyone. Whether you're experienced in CAD design or programming, or just starting out, there are many ways to contribute. Check out our CONTRIBUTING guide for more details.

    Licensing and Collaboration Notes

    This project is licensed under the CERN open hardware license. We encourage users to share their modifications and improvements. All design files are available for free, but we appreciate feedback and collaboration.

    For details on the licensing, please visit License.md.

    Note: Design files were created using Autodesk Inventor 2019 (EDUCATION).

    Stay Connected

    If you find this project beneficial, please star this repository, follow us on Twitter, and cite our webpage in your work!


      openUC2 In-line holography

      Workshop Manual: Building an Inline Holographic Microscope with UC2

      Welcome to our workshop on building an inline holographic microscope using the UC2 modular microscope toolbox. In this experiment, we will create a lensless microscope that demonstrates temporal and spatial coherence properties. By following the steps below, you will construct a simple yet effective holographic microscope to observe transparent samples.

      This experiment is an introduction into the UC2 toolbox and should give you a chance to get familiar with the core-idea of creating simple, but also complex optical setups using the modular system.

      Here you are going to learn:

      • assemble the cube
      • add inserts
      • arrange multiple cubes
      • lensless imaging

      This is what you want to build now:

      Resources

      • OLD an earlier (2019) workshop on this matter can be found here
      • CAD the full assembly including the description can be found in the Assembly-folder
      • SLIDES for the introduction into holography can be found in the PRESENTATION-folder

      Overview

      The inline holographic microscope utilizes the principles of holography and coherent light sources to capture and reconstruct 3D images of transparent samples. Instead of using traditional lenses, we rely on interference patterns between a reference beam and the scattered light from the sample. The resulting hologram is then computationally reconstructed to visualize the sample.

      Materials Needed

      1. UC2 Modular Microscope Toolbox (includes cubes and puzzle pieces).
      2. LED Holder.
      3. Gel Color Filter.
      4. Aluminum Foil with a pinhole.
      5. Transparent Sample (e.g., biological specimen or microstructure).
      6. Camera Sensor (e.g., ESP32 camera module).

      Theory of Operation

      1. Creating the Light Source: The holographic microscope begins with a specially prepared light source. An LED, filtered through a gel color filter and focused through a pinhole in aluminum foil, generates quasi-monochromatic coherent light. This coherent light source is essential for the interference patterns necessary for holography.

      2. Sample and Camera Setup: A transparent sample is placed in the path of the coherent light source. As the light passes through the sample, it becomes scattered, creating a complex wavefront. The camera, integrated into the same cube as the sample holder, captures this scattered light as an interference pattern, known as the hologram.

      3. Fresnel Propagation: When the scattered light reaches the camera sensor, it captures the intensity of the interference pattern. In inline holography, both the object and reference beams travel along the same path to the sensor, causing them to interfere. The Fresnel propagation is used to numerically propagate the hologram from the sensor plane to the object plane and vice versa.

        Fresnel propagation is a mathematical process that simulates the propagation of light waves between two planes. It utilizes the Fresnel-Kirchhoff diffraction integral to calculate the complex wavefront at a given distance from the hologram plane. This numerical transformation involves the Fast Fourier Transform (FFT), which efficiently converts the hologram from spatial to frequency coordinates.
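        A minimal numerical version of this back-propagation can be written in a few lines of numpy. This is a generic Fresnel transfer-function sketch, not the exact notebook shipped with the workshop; the wavelength, pixel size and propagation distance are assumed values you need to adapt to your own pinhole/LED/camera combination.

        ```python
        import numpy as np

        def fresnel_propagate(field, wavelength, pixel_size, z):
            """Propagate a complex field by a distance z using the Fresnel transfer function."""
            ny, nx = field.shape
            fx = np.fft.fftfreq(nx, d=pixel_size)      # spatial frequencies along x
            fy = np.fft.fftfreq(ny, d=pixel_size)      # spatial frequencies along y
            FX, FY = np.meshgrid(fx, fy)

            # parabolic (Fresnel) phase factor applied in frequency space
            H = np.exp(-1j * np.pi * wavelength * z * (FX ** 2 + FY ** 2))
            return np.fft.ifft2(np.fft.fft2(field) * H)

        # assumed parameters - adapt them to your setup
        wavelength = 530e-9    # green gel filter, in m
        pixel_size = 1.4e-6    # camera pixel pitch, in m
        z = -1.5e-3            # back-propagate 1.5 mm from the sensor to the sample plane

        hologram = np.random.rand(480, 640)    # placeholder - load your recorded hologram here
        amplitude = np.sqrt(hologram)          # the camera records intensity, not amplitude
        reconstruction = np.abs(fresnel_propagate(amplitude, wavelength, pixel_size, z)) ** 2
        ```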

      4. Fast Fourier Transform (FFT)

      https://youtu.be/2P4FSSlXXQA

        Conclusion

        Congratulations! You have successfully built an inline holographic microscope using the UC2 modular microscope toolbox. This simple yet powerful setup allows you to observe transparent samples and explore the fascinating world of holography and coherent light sources. By understanding the principles of Fresnel propagation and Fast Fourier Transform, you can reconstruct digital holograms and visualize 3D structures in your samples. Have fun experimenting with different samples and refining your holographic imaging techniques!

        Known Problems

        • distance between sensor/sample
        • stray Light
        • sample not sparse enough

        Very Experimental: Reconstruct Holograms with ImSwitch

        Prerequisites: Here you will find a guide on how to set up the ImSwitch software:

        • Download the Software package from Dropbox
        • Install Anaconda (Important: When you're asked to add Anaconda to the PATH environment, say YES!)
        • Install Arduino + all drivers
        • Install the CH340 driver
        • Extract ImSwitch.zip to /User/$USER$/Documents/ImSwitch (clone or download from GitHub)
        • Extract ImSwitchConfig.zip to /User/$USER$/Documents/ImSwitchConfig (clone or download from GitHub)
        • Optional: Install Visual Studio Code + the Python plugin => set up the Visual Studio Code IDE for Python

        Install ImSwitch for UC2

        • Open the Anaconda command line (Windows + R => "CMD" => Enter)
        • Type: conda create -n imswitch
        • Wait until environment is created
        • Type: conda activate imswitch
        • Type: cd /User/$USER$/Documents/ImSwitch
        • Type: pip install -r requirements.txt
        • Type: pip install -e ./
        • Type: imswitch

        Reconstruction

        This video will show you how to reconstruct holographic data using UC2 and ImSwitch.

        https://youtu.be/CWXx0Dw-Jro

        Things to explore:

        • Get Familiar with ImSwitch
        • Get a sparse sample - e.g. plankton on a coverslip would be best, or just dust/sand/cheek cells - and try to acquire some holograms

        ADDITIONAL: Speech-to-text

        The first experiment will be the inline holographic microscope. This is a relatively simple experiment where we can show both temporal and spatial coherence. We will create a lensless microscope where we use an LED that is filtered by a color filter and a pinhole to create a quasi-monochromatic, coherent light source. This then illuminates the transparent, sparse sample before the scattered wave hits the camera sensor. This is relatively simple to build with the UC2 system; for this, we only need the LED holder, a gel color filter (as used in theatres), aluminum foil in which we pierce a hole to create a pinhole, some space between this light source and the sample, and then the sample, which is ultimately placed very close to the sensor, so that the pinhole virtually scales in size with the ratio between the distance from the light source to the sample and from the sample to the sensor. In order to build the system, we place the light source we just created on the far left; an empty cube follows right next to it; another empty cube follows on the right-hand side; and then we combine the sample mount and the camera into one cube so that the distance between the sample and the camera is minimized. All these cubes should be mounted on puzzle pieces at the bottom and on the top so that the whole system becomes stable. We turn on the camera and also turn on the light source. Then we go to the web app after connecting to the camera through Wi-Fi, and try to see any variation in the contrast of the camera image. If the contrast is not high enough because of scattered background light, we have to cover the system with a box or some enclosure so that no stray light hits the sensor; stray light gives a very bad result in the reconstruction. When you're lucky, you can already see the sample as a kind of shadow on the sensor. The core idea now is to reconstruct this digital hologram, where we have to carefully maximize the quality of the recorded image. Compression artifacts from the ESP32 camera are unavoidable and will eventually degrade the final image results. What we are going to do now is to capture an image and then back-propagate it over the distance from the sensor to the sample plane using a numerical transformation. What this really means is that we take the image, take every pixel, and back-propagate it by a certain distance numerically. This is done using a fast Fourier transform: we first Fourier transform the image so that it is in frequency space, then we multiply it with a parabolic phase factor, and then we inverse Fourier transform the result to end up in real space again. This amounts to a convolution with the Fresnel kernel, which essentially propagates every pixel by a certain distance depending on the wavelength and sampling rate. We can conveniently do that in Python with the script that is provided as a Jupyter notebook. For this, we go to the website of the ESP32, hit the capture button, and download the image onto the computer. Then we start the Jupyter notebook server by opening the command line in Windows or Linux and entering "jupyter notebook". Then we go to the browser and open the example Jupyter notebook that will reconstruct our hologram. We enter the path of our downloaded image file and then reconstruct the result.
        There are several problems which we can describe but not solve at the moment. Inline holography, as the name already says, has the problem that the light source and the scattered wave interfere in line. That means the point source creates spherical waves that propagate in free space and become almost a plane wave when they hit the sample. Here some parts of the wave are scattered, which means that the plane wave is altered in its phase depending on the phase of the microscopic sample, and some portion of the wave is unaltered. After the sample, the unscattered and the scattered wave propagate to the sensor where the two amplitudes superpose, i.e. they add up. Since our camera detector cannot record amplitudes - the optical frequency is very, very high - we average over time. That means we record intensity values in the end, and the information about the phase is lost. When we reconstruct the hologram, we therefore cannot differentiate whether the sample is in front of or behind the sensor, since the phase information is not there anymore. This means that in the reconstruction the so-called twin image always overlays the real image. This causes unavoidable ringing artifacts in the reconstruction. There are some ways to remove it, for example by estimating the phase using iterative algorithms or model-based approaches, where we take the full image acquisition process into account. Alternatively, there are also machine learning algorithms where an algorithm estimates the background and removes these artifacts. However, here we won't use these algorithms as we just want to learn how we can reconstruct the sample.

        Some notes on the transform that we have just used here. Briefly, it is a transformation from spatial to frequency coordinates. This sounds very abstract, but, for example, our ear does this all the time. When we talk, our voice generates a vibration of the air. That means different frequencies are oscillating and add up to something like noise. Our ear, in turn, has the cochlea, where many nerve cells oscillate depending on the resonance frequency of each cell. In a way, they unmix the noise and demodulate the different frequencies. That means that if you're singing an A, there is the fundamental frequency and several higher and lower harmonics. A lens does something very similar but in two dimensions. You can have optical frequencies where, for example, a grating with stripes that represent on and off and on and off at a certain distance represents a periodic structure. A lens, when you place something in its focal plane, will then Fourier transform this into the demodulated frequency components. When you, for example, have a periodic structure like a grating, it will produce two peaks in its Fourier transform, i.e. in its back focal plane. A fast Fourier transform is the equivalent in computational science. You can take an image and then represent it by its frequency components; that means it estimates the sum of all the different frequency components that make up the image. We use this fast Fourier transform in our code to bring the image from real space to frequency space and back again. But since we start with an image that carries only the amplitude and not the phase, we lack that information.

        This property creates additional artifacts, since we lose the phase information when we record intensity values on our camera. We are also limited to sparse samples, like the plankton captured in the water. The optical resolution of our microscope is bound to the pixel size and the opening angle, i.e. the numerical aperture that is created by the illumination and the sensor size that we use to detect the image. However, it is a very nice way of demonstrating how holography works and how we can detect images without a lens. Many different groups have used it, for example, to detect malaria in blood, since the field of view is very large.


    openUC2 Interferometer Introduction

    This is a collection of different mini-tutorials for assembling the different optical systems using UC2. First, we will introduce each setup with a brief text. Afterwards, a little video will help you assemble the device. If you have any questions, please feel free to post them in the Forum or in the GitHub Issue section.

    What will you learn?

    • What's inside the box?
    • How can we start different experiments?

    What's inside the box?

    Duration: 3

    Inside the box you will find a number of different cubes, all coming with different functionalities. Below you will find a list of all modules inside the discovery kit.

    Lasers and Beamexpanders

    Duration: 3

    Lasers and Interferometers

    Duration: 3

    Microscopes

    Duration: 3

    Polarization

    Duration: 3

    Microscope with Webcam

    Duration: 3


    openUC2 Mach-Zender Interferometer

    Tutorial: Mach-Zender Interferometer

    Materials needed:

    • Laser diode
    • Hikrobot Camera (MV-CE060-10UC) with USB cable (Hikrobot Camera Software installation).
    • Small stage with gear.
    • Two kinematic mirrors (in cubes).
    • Two beam splitters in cube.
    • Sample holder (in cube).
    • Two empty cubes.
    • Base plates.
    • Screen.
    • Pinhole in cube.
    • Screwdriver to adjust alignment (1,5x60)
    • Two 100 mm converging lenses.

    Instructions for assembling the Mach-Zender Interferometer:

    Step 1: Build the base plate configuration

    Build the base plate configuration as shown. Note: At this point the laser diode should be turned off the whole time. Don't look at the laser directly. Always use screens to look for the laser light.

    Step 2: Align the laser diode with the pinhole

    Place the laser diode, an empty cube, and a 100 mm convergent lens in a straight line. Then, place the pinhole two cube units from the lens and place the screen after the pinhole. Turn the laser on and align it by using the screws to center the beam on the pinhole.

    Step 3: Check beam collimation

    Check whether the beam is collimated by placing the screen at different distances. The beam diameter should stay roughly the same. If it does not, the distance between the laser and the lens should be adjusted. Turn the laser off.

    Step 4: Set up the beam splitter and mirror

    Place the beam splitter and the kinematic mirror as shown. Place the pinhole two cube units away from the mirror and the screen behind it. Turn the laser on and align the kinematic mirror using the screws. Once it's done, turn the laser off.

    Step 5: Adjust the microscope objective and lens

    Place the microscope objective, followed by an empty cube and the 100 mm lens. You should adjust the distance between the objective and the 100 mm lens so that the beam is collimated after going through both. Place the screen after the lens. Turn the laser on and check the collimation. Adjust the distance as necessary. Turn the laser off.

    Step 6: Setup and alignment

    Place the camera on the sample arm as shown. Put the screen at the exit of the other arm. Place the sample holder using one half of the cube at a time so that it does not collide with the microscope objective.

    Turn the laser on and use the screen to align both beams using the screws on the reference mirror.

    Step 7: Connect and adjust in the MVS app

    Connect the camera to the computer and open the MVS app. Block the reference beam. Move the coverslide such that your sample enters the FoV (Field of View). Unblock the reference beam. Zoom into the image to distinguish the fringe pattern in the MVS camera display. Adjust the angles of the reference mirror using the screws to change the fringe pattern as shown.

Step 8: Data processing

Process the acquired data. Phase unwrapping is possible.
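
As an illustration of this step, here is a minimal Python sketch of phase unwrapping (our own example, not the official UC2 or ImSwitch code; it assumes NumPy and scikit-image are installed, and the synthetic `wrapped` array stands in for a phase map reconstructed from the interferogram):

```python
# Minimal phase-unwrapping sketch (illustrative only).
import numpy as np
from skimage.restoration import unwrap_phase

yy, xx = np.mgrid[0:256, 0:256]
true_phase = 0.0015 * (xx - 128.0) ** 2 + 0.05 * yy   # synthetic smooth phase
wrapped = np.angle(np.exp(1j * true_phase))           # wrapped into (-pi, pi]

unwrapped = unwrap_phase(wrapped)                     # removes the 2*pi jumps

# The unwrapped map matches the true phase up to a constant offset.
print("peak-to-valley (true):     ", np.ptp(true_phase))
print("peak-to-valley (unwrapped):", np.ptp(unwrapped))
```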

    First Tests with Modifications to the Original Setup

Using Lei's code, the need for a linear stage for the sample was identified. Adjusting the objective and tube lens enhances the interference, so it is crucial to use the ImSwitch interface to watch the FFT in real time while optimizing. The final goal is to shift the position of the first-order interference term so that Lei's algorithm (or another phase-unwrapping algorithm) can retrieve the phase. To achieve this, two images need to be acquired: a sample image and a background image (taken without a cover slide, or from a slide region with no specimen).
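
A minimal sketch of this processing chain is shown below, assuming the two camera frames are available as NumPy arrays; the file names, the function name, and the crop size are placeholders of ours and are not taken from Lei's code or from ImSwitch:

```python
# Illustrative off-axis hologram processing (placeholder code, assumptions noted above).
import numpy as np

def extract_field(hologram, crop=64):
    """Isolate the first diffraction order in the spectrum and return the complex field."""
    spectrum = np.fft.fftshift(np.fft.fft2(hologram))
    h, w = spectrum.shape
    # Suppress the zero order in the centre so the strongest remaining peak is the +1 order.
    masked = spectrum.copy()
    masked[h // 2 - crop:h // 2 + crop, w // 2 - crop:w // 2 + crop] = 0
    py, px = np.unravel_index(np.argmax(np.abs(masked)), masked.shape)
    # Crop a window around the first-order peak (assumes it sits well inside the image)
    # and transform back; the result is the complex object field.
    window = masked[py - crop:py + crop, px - crop:px + crop]
    return np.fft.ifft2(np.fft.ifftshift(window))

sample = np.load("sample.npy")          # assumed file names
background = np.load("background.npy")

field_sample = extract_field(sample)
field_background = extract_field(background)

# Background-corrected phase; still wrapped, so a phase-unwrapping step follows.
wrapped_phase = np.angle(field_sample * np.conj(field_background))
```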

    Result of Phase Unwrapping

    openUC2 Michelson Interferometer

    Workshop Manual: Building a Michelson Interferometer using UC2

    In this workshop, we will construct a Michelson Interferometer using the UC2 modular microscope toolbox. The Michelson Interferometer is a device that measures the interference properties of light. We will treat light as a wave, with a very high frequency, and use it to perform interesting experiments.

    Materials Needed

    1. Green Laser Pointer with a relatively high temporal coherence.
    2. Lenses for beam expansion.
    3. Beam splitter plate or cube with a partially reflective mirror coating.
    4. Three mirrors.
    5. Screen or camera sensor (e.g., ESP32 camera module) with USB cable.
    6. UC2 Modular Microscope Toolbox (cubes, puzzle pieces, and holders).

    Theory of Operation

A Michelson Interferometer splits a laser beam into two equal parts using a beam splitter. The two beams are then reflected by mirrors and recombined so that they interfere with each other. When the paths of the two beams are equal, they interfere constructively, resulting in a bright output. However, if one mirror is shifted by a quarter of the wavelength (changing that beam's round-trip path by half a wavelength), the beams interfere destructively, resulting in a dark output. In the following, we delve into more theoretical background about interference and how the Michelson Interferometer was historically used to study the speed of light.

    Theoretical Background: Interference

    Interference is a phenomenon that occurs when two or more waves overlap in space and combine their amplitudes. When the waves are in-phase (their crests and troughs align), they constructively interfere, resulting in a larger amplitude. On the other hand, if they are out of phase (their crests and troughs are misaligned), they destructively interfere, resulting in a smaller or zero amplitude. Interference is a fundamental concept in wave physics and plays a crucial role in understanding the behavior of light.

    Michelson Interferometer and Measurement of the Speed of Light

    The Michelson Interferometer, invented by Albert A. Michelson in the late 19th century, is a classic optical device that exploits the principles of interference to measure various optical properties, including the speed of light.

    In the Michelson Interferometer setup, a light beam is split into two equal parts using a beam splitter. One part is directed towards a stationary mirror (the reference mirror) while the other part is directed towards a movable mirror (the sample mirror). The two beams are then reflected back towards the beam splitter, and they recombine. Depending on the path difference between the two beams, they may interfere constructively or destructively.

    By moving the sample mirror, the path difference between the two beams changes. When the path difference corresponds to an integral number of wavelengths (constructive interference), the interference pattern exhibits bright fringes. Conversely, when the path difference corresponds to a half-integral number of wavelengths (destructive interference), the pattern exhibits dark fringes.
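
For an ideal 50/50 beam splitter and a sufficiently coherent source, the detected intensity as a function of the round-trip path difference \Delta can be written as

I(\Delta) = \frac{I_0}{2}\left[1 + \cos\!\left(\frac{2\pi\,\Delta}{\lambda}\right)\right], \qquad \Delta = m\lambda \ \text{(bright)}, \quad \Delta = \left(m + \tfrac{1}{2}\right)\lambda \ \text{(dark)}.

Moving a mirror by a distance d changes the path difference by \Delta = 2d, because the light traverses that arm twice.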

The key to these precision measurements lies in tracking the movement of the sample mirror: as the mirror is displaced, the fringe pattern shifts, and by counting this shift we can determine the change in path difference to within a fraction of a wavelength.
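
In practice this means that counting fringes directly yields the mirror displacement: if N bright-dark-bright cycles pass, then d = N\,\lambda/2. Assuming, for example, a green laser with \lambda = 532\,\mathrm{nm}, counting N = 100 fringes corresponds to d = 100 \cdot 532\,\mathrm{nm}/2 = 26.6\,\mu\mathrm{m} of mirror travel.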

Michelson used this interferometer in an elegant experiment to compare the time it took light to travel in two perpendicular directions. This famous experiment, carried out with Morley in 1887, found no measurable difference and thereby showed that the speed of light is independent of direction. (Michelson's celebrated 1879 measurement of the numerical value of the speed of light used a separate rotating-mirror method.)

    The Michelson Interferometer remains an essential tool in modern optics and has found applications in diverse fields, including astronomy, spectroscopy, and interferometric microscopy.

    Interference is a fundamental concept in wave physics, and the Michelson Interferometer is a classic optical device that exploits this phenomenon to make precise measurements. By understanding the principles of interference and the working of the Michelson Interferometer, we gain valuable insights into the nature of light and its behavior in different optical setups. It stands as a testament to the ingenuity of scientific instruments and continues to play a significant role in advancing our understanding of the physical world.

    Tutorial: Michelson Interferometer

    Materials needed:

    • Laser diode
    • Hikrobot Camera (MV-CE060-10UC) with USB cable (Hikrobot Camera Software installation)
    • Stage with gear with mirror
    • Three kinematic mirrors (in cubes)
    • Beam splitter in cube
    • Sample holder (in cube)
    • One empty cube
    • 16 base plates
    • Screen
    • Pinhole in cube
• Screwdriver to adjust alignment (1.5 x 60 mm)

    Diagram:

Instructions for assembling the Michelson Interferometer:

Step 1: Build a four-base-plate assembly

Assemble four base plates as shown. This will be used to connect the laser diode, the pinhole, the beamsplitter, and an empty cube. Add the base plates to fix them.

Note: at this point the laser diode should remain turned off the whole time. Never look into the laser directly; always use a screen to locate the laser light.

    Step 2: Place the pinhole

Place the pinhole as far as possible from the laser diode.

    Step 3: Close the diaphragm

    Close the diaphragm as much as possible to end up with a small hole.

    Step 4: Place the screen and align the laser

Place the screen after the pinhole and turn the laser on. The alignment will most likely be off, so use the screwdriver to adjust the laser mount screws until the beam is centered on the pinhole. Turn the laser off.


    Step 11: Set up the camera

Place the camera and fix it with the base plates. Connect it to the computer and open the MVS software. To check the MVS tutorial, click (here).

    Step 12: Adjust the camera exposure

Adjust the exposure time of the camera. You should see a fringe pattern. Finely adjust the reference mirror screws to bring the center of the interference pattern to the center of the camera.

    Experimental Data

This is the fully assembled UC2 interferometer with a green laser diode, a camera that acts as a screen and digitizes the interference, a beamsplitter, a kinematic mirror, and a mirror that can be translated along Z.

If you bring the two beams on top of each other, you will be able to observe the interference pattern, which, in the case of one beam exactly overlaying the other, will be a ring pattern. These rings are also called Newton's rings and arise because we interfere two divergent beams, leading to a superposition of two spherical wavefronts.

Using the ESP32 camera, we can quantify the motion of the beams and, for example, measure distances or angles.
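
One way to do this is to count intensity oscillations in a small region of interest over successive camera frames while one mirror is translated. The Python sketch below is only an illustration under stated assumptions (grayscale frames as NumPy arrays, a 532 nm green laser); it is not part of the UC2 firmware or software:

```python
# Illustrative fringe-counting sketch; each bright-dark-bright cycle corresponds
# to lambda/2 of mirror travel.
import numpy as np

WAVELENGTH_M = 532e-9  # assumed laser wavelength

def count_fringes(frames, roi=slice(90, 110)):
    """Count intensity oscillations in a small ROI and convert them to mirror travel."""
    trace = np.array([frame[roi, roi].mean() for frame in frames])
    trace -= trace.mean()
    # Count positive-going zero crossings of the mean-subtracted intensity trace.
    cycles = int(np.sum((trace[:-1] < 0) & (trace[1:] >= 0)))
    return cycles, cycles * WAVELENGTH_M / 2

# Demo with synthetic frames: 10 full fringe cycles recorded over 500 frames.
phases = np.linspace(0, 10 * 2 * np.pi, 500)
frames = [np.full((200, 200), 128.0 + 100.0 * np.cos(p)) for p in phases]
n_cycles, travel = count_fringes(frames)
print(n_cycles, "fringes ->", travel * 1e6, "µm of mirror travel")
```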

    Conclusion

    Congratulations! You have successfully built a Michelson Interferometer using the UC2 modular microscope toolbox. This device allows you to explore the interference properties of light and perform fascinating experiments. As you move one of the arms, you will observe constructive and destructive interference patterns on the camera, demonstrating the wave-like nature of light. Have fun experimenting with different setups and learning more about the wave-particle duality of light!

    HIK-Camera Software Installation

    Install MVS App for Camera Utilization

    Camera model: MV-CE060-10UC. Visit the HIKROBOTICS website and download the MVS software suitable for your computer. Below are steps exemplifying the software installation for Mac.

    Install the downloaded file.

    Open the MVS Software.

    You should see the following window.

    Connect the camera. Refresh the USB line to detect the camera.


    Select the make-link button on the detected camera.

    The following window should be displayed.

    Click on the play button in the actions bar of the camera.

    If properly connected, you should see a real-time image. Adjust the exposure if the image is overexposed.

    To adjust the exposure time, go to the Feature tree, select the Acquisition Control Category, and change the Exposure Auto option to Continuous.

    Now, a clear image with good contrast should be visible.

    To stop recording, click on the stop button in the camera's actions bar.

    To disconnect the camera, click on the break-link button next to the detected camera in the USB devices list.

The amount of reflected light is determined by the optical properties of the reflecting surface, such as plastic sheets, glass, or highways.
The angle of incidence of the incoming electromagnetic light wave and the refractive indices of the media through which the light travels play an essential role in the degree of polarization of the reflected and refracted beams.

Below you can see the reflection and transmission of unpolarized light for most values of the incident angle (𝜃).

    What is the Brewster angle?

When the incident ray travels from a less dense medium (n1) into a denser medium (n2) at a particular angle (𝜃_B), the reflected ray is perfectly s-polarized, i.e. the electric field vectors are oriented perpendicular to the plane of incidence. The refracted beam, which at this angle is perpendicular to the reflected beam, is only partially polarized (enriched in the p-component). This angle is called the Brewster angle or polarization angle and is represented by 𝜃_B in the scheme below.

The Brewster angle can be calculated easily from the refractive indices of the two media. In our experiment, the first medium, in which the light arrives and is reflected, is air (n1 = 1), and the second medium, through which the light is transmitted, is a microscope slide glass (n2 = 1.5). For these values the Brewster angle is approximately 57 degrees, as shown by the equation below.
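
The relation referred to here is Brewster's law, which for our air-glass interface gives

\tan\theta_B = \frac{n_2}{n_1} \;\Rightarrow\; \theta_B = \arctan\!\left(\frac{1.5}{1.0}\right) \approx 56.3^\circ \approx 57^\circ.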

    Parts

    Modules for this setup

Name | Properties | Price | Link | #
4×4 Baseplate | Skeleton of the System | 21.79 € | Base-plate | 1
MODULE: Polarizer Cube | It holds the linearly polarizing filter | 4.31 € | Linear Polarizer | 1
MODULE: Kinematic Microscope Slide Holder | It is used to insert the microscope slide with Brewster angle (53 degrees) | 3.7 € | Microscope Slide Holder | 1
MODULE: Laser Cube | LASER source holder | 17.68 € | Light Source | 1
EXTRA MODULE: Screen Holder Cube | It holds the Display Screen (Not Used in Practice) | 1 € | Screen | 1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 67 × 5 mm Ball magnets 🢂
    • 28 x Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 3 x Screws DIN912 ISO 4762 M2×16 mm 🢂
    • Microscope Rectangular Coverslips 🢂
    • 1 × 5 mW Blue UV Laser Pointer 🢂

    Assembly

All necessary parts to assemble a Microscope Slide Glass Holder Module are gathered in the image below:

    Results

The Brewster's angle experiment setup can be prepared easily. After printing and assembling the module parts, we try to get as close as we can to the critical angle, i.e. Brewster's angle.

In this setup we use a laser as the light source because the change of the polarization degree upon reflection is easier to observe. First, the laser beam is reflected from the microscope slide glass and passes through a linear polarizer. Then a piece of paper is inserted into the sample holder comb as a screen. Finally, we can observe the fully polarized reflected light.

Good alignment is needed so that the incidence angle of the incoming light matches the Brewster angle. In practice this is hard: because the adjustment relies on screws, we could not set the exact incidence angle on the microscope slide. Nevertheless, we obtained almost perfectly polarized light after reflection at an angle close to the critical angle.
Above, you can see the reflected laser beam without alignment, at a random incident angle.

In the image below, the incoming beam is reflected at approximately the Brewster angle, 57 degrees for microscope slide glass:

Let's look at the video recordings of our polarization-by-reflection experiment. Only the orientation of the linear polarizer changes between the 1st and 2nd videos, and we see that the reflected light is almost totally polarized: when the orientation of the linear polarizer is perpendicular to the polarization of the reflected beam, the light is blocked and almost nothing is visible after the polarizer.

    New Ideas

    We are open to new ideas. Please contribute to the project freely, this place is a free country which is built by codes and machines :robot:

The incoming light passes through a linear polarizer (grey UC2 cube). In the next step, the now linearly polarized light travels to the circular polarizer module (black UC2 cube). When an observer looks from the circular polarizer side, she/he sees circularly polarized light.

The circular polarizer filter was taken from a pair of 3D cinema glasses and mounted in a sample holder insert. This new circular polarizer insert was assembled with a UC2 unit cube. Ta-da! The circular polarizer cube is ready for flight.

The effect of the orientation of the linear polarizer can be seen in the video below. The polarization direction of the light before the circular polarizer changes as the wheel of the linear polarizer is turned and its orientation changes.

    New Ideas

    Dear Visitor,
    you have an opportunity to view our experiments. If you have a new idea, just open a new issue and shine our eyes with your light.
    Greetings from UC2 Team


    Crossed Polarizers

Crossed polarizers are used to analyze the polarization of light. We use two linear polarizers whose transmission axes are oriented perpendicular to each other. [1]

In the experiment, the polarization direction of the first polarizer is oriented vertically with respect to the incoming beam, so it passes only vertical electric field components. After the first polarizer, we have an s-polarized (vertically polarized) light wave. [2]

The second polarizer is oriented horizontally, i.e. perpendicular to the electric field vector transmitted by the first polarizer, so it blocks the wave that passed through the first polarizer. The two polarizers should therefore be oriented at right angles with respect to each other. You can see the orientation of the linear filters and the change of the light polarization during the experiment in the figure below.
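
Quantitatively this is described by Malus's law: if I_0 is the intensity behind the first polarizer and \theta the angle between the two transmission axes, then

I(\theta) = I_0 \cos^2\theta, \qquad I(90^\circ) = 0,

which is why the crossed configuration ideally blocks the light completely.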

    Time to build a Crossed Polarizers setup!

    Parts

    Modules for this setup

Name | Properties | Price | Link | #
4×1 Baseplate | Skeleton of the System | 5.47 € | Base-plate | 1
MODULE: Polarizer Cube | It holds the linearly polarizing filter | 8.62 € | Linear Polarizer | 2
EXTRA MODULE: Sample Holder Cube | It holds the Sample (Not Used in Practice) | 1.3 € | Sample Holder | 1
EXTRA MODULE: Screen Holder Cube | It holds the Display Screen (Not Used in Practice) | 1 € | Screen | 1
EXTRA MODULE: Flashlight Lamp Cube | Light Source | 7.2 € | Flashlight | 1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 16 × 5 mm Ball magnets 🢂
    • 16 x Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 6 x Screws DIN912 ISO 4762 M2×16 mm 🢂
    • NOT USED 1 × flashlight 🢂

    Assembly

If all the listed modules are used in the experiment, the setup will look like this:

    Results

We printed and assembled the two Linear Polarizer modules. Then we bought the necessary components and inserted them into the cubes.
You will find the basic version of the Crossed Polarizers experiment, without a specific sample or additional light source, below. We demonstrated the experiment with room light.

The direct effect of the angle between the two linear polarizers can be observed in the video below. The intensity of the light passing through the crossed polarizers changes as one polarization filter is rotated through 360 degrees.

    New Ideas

    We are open to new ideas. Please contribute to the project freely, this place is a free country which is built by codes and machines :robot:

    References

    [1] Introduction to Polarized Light. (n.d.). Nikon’s MicroscopyU. Retrieved February 15, 2021, from https://www.microscopyu.com/techniques/polarized-light/introduction-to-polarized-light
    [2] Logiurato, F. (2018). Teaching Light Polarization by Putting Art and Physics Together. The Physics Teachers, 1–5. https://arxiv.org/ftp/arxiv/papers/1803/1803.09645.pdf

The printed and assembled Sample Holder Comb module with nine microscope glasses:

You will find the basic version of the experiment, without a Screen and Linear Polarizer module, below.

Images of the resulting experimental setup: side view (top), top view (bottom).

The direct effect of the rotation angle of the linear polarizer can be observed in the video below. The laser light passes through microscope slides and air gaps several times and thereby gets close to fully polarized. We can see this effect by adding the Linear Polarizer cube.
The intensity of the light passing through the linear polarizer changes as the polarization filter is rotated through 360 degrees.

The change can be seen in the two videos below, which demonstrate the experiment from two different views.

    New Ideas

Rat-Tat! We are here to hear new ideas. Please don't be shy and have a heart-to-heart talk with us. 💝


    Newton's Rings Experiment

Soap bubbles, oil slicks, or oxidized metal surfaces can create interference patterns under white-light illumination. In Newton's fringes, the light reflected from two closely spaced surfaces interferes constructively or destructively. Such surface pairs can be created using glass-air or air-glass contacts. Under white-light illumination this interference generates a concentric ring pattern of rainbow colours; in the same way, monochromatic light creates alternating dark and bright rings.

The simplest example can be made using two well-cleaned microscope slides as the interfaces. An air film of uneven thickness is enclosed between the two slides, and irregular coloured fringes are generated under daylight. When the pressure on the microscope slides changes, the fringes move and change.

In the Newton's Rings experiment, we placed the long-focal-length (curved) side of a plano-convex lens on an optically flat glass plate, a microscope slide. These two pieces enclose a thin, non-uniform air film. When light is shone through these surfaces, the air gap and the uneven pressure on the microscope slide and plano-convex lens generate irregular coloured or single-colour fringes: Newton's Rings.
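
For reference (a standard textbook result, not stated explicitly in this tutorial): with monochromatic light of wavelength \lambda at near-normal incidence, the dark rings seen in reflection appear at radii

r_m = \sqrt{m\,\lambda\,R}, \qquad m = 0, 1, 2, \dots

where R is the radius of curvature of the convex lens surface, so measuring the ring radii allows R (or \lambda) to be estimated.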

    The details of Experiment Modules

A linear polarizer is used in this experiment to visualize, on the interference pattern, the polarization change of the light reflected from the two media.

An additional module was designed to combine the microscope glass slide and the plano-convex lens inside one cube insert. You can see a rendered image of the Newton's Rings Lens-Slide Holder Module from Inventor.


We used a laser as the light source in the setup. During the experiment, we expanded the beam diameter of the pen laser from 2 mm to 6 mm using a regular Beam Expander Module.

    Parts

    Modules for this setup

Name | Properties | Price | Link | #
4×4 Baseplate | Skeleton of the System | 21.79 € | Base-plate | 1
MODULE: Beam Expander Cube | It expands the laser beam size | 13.55 € | Beam Expander | 1
MODULE: Beam Splitter Cube | It splits the incoming beam and recombines the parts | 29.17 € | Beam Splitter Holder | 1
MODULE: Newton's Rings Slide-Lens Holder Cube | It creates Newton's Rings | 7.54 € | Lens - Slide Holder | 1
MODULE: Polarizer Cube | It holds the linearly polarizing filter | 4.31 € | Linear Polarizer | 1
MODULE: Laser Cube | LASER source holder | 17.68 € | Light Source | 1
EXTRA MODULE: Screen Holder Cube | It holds the Display Screen (Not Used in Practice) | 1 € | Screen | 1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 64 × 5 mm Ball magnets 🢂
    • 44 x Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 3 x Screws DIN912 ISO 4762 M2×16 mm 🢂
    • 1 x Beamsplitter Cube (Art. 2137) 🢂
    • 1 x Microscope Rectangular Coverslips 🢂
    • 1 x Plano-Convex Lens 🢂
    • 1 × 5 mW Blue UV Laser Pointer 🢂
    • 1 x iPhone 5 Lens f'=3mm (separated from an iPhone camera spare part) 🢂
    • 1 x Achromat Lens f' = 26,5 mm 🢂

    Assembly

    Results

We started by building the UC2 modules: design, print, assemble, and get ready for testing. You can see our Beam Expander Cube on the 4x1 Baseplate below.

The Lens-Slide Holder Module is the key element for generating Newton's Rings. The necessary module parts are shown in the image below.

The assembled and ready-to-use module should look like this:

The experimental procedure begins with:

• installing the Laser and Beam Expander Modules on the 4x4 Baseplate;

• after checking the expansion of the laser beam width, adding the Beam Splitter Cube to the setup;

• directing one of the split beams to the Newton's Rings Lens & Microscope Holder cube, so that the light reflected from the convex lens-glass plate combination passes back through the beam splitter cube and onto the observation screen.

Demonstrating the experiment is much easier with a laser light source, which gives clearly visible fringes. The Newton's fringes will vary in colour from the inner to the outer circles if the laser source is replaced by a white light source.

Let's zoom into the Newton's fringes with more experiment images!

    The effect of the polarization angle change of the Linear Polarizer Filter can be seen in the video.

    New Ideas

    We are open to new ideas. Please contribute to the project freely, this place is a free country which is built by codes and machines :robot:


    Polarization Experiment using Optically Active Solution

Optical activity was discovered by Arago in quartz in 1811. In 1847, molecular chirality was observed by the scientist Louis Pasteur: he found that natural tartaric acid is optically active and that its crystals exhibit isomerism and chiral morphology [1].

Molecular chirality describes two molecules with the same chemical formula that are mirror images of each other. Optically active molecules come in two varieties: dextrorotatory (they rotate plane-polarized light clockwise) and levorotatory (counterclockwise).

Sucrose is a disaccharide made of glucose and fructose and is dextrorotatory: it rotates plane-polarized light to the right. A well-known example of sucrose is table sugar, produced naturally in plants. Fructose is a simple ketonic sugar and is levorotatory, rotating plane-polarized light to the left. Glucose is a simple sugar belonging to the carbohydrate family and is dextrorotatory. Fructose and glucose share the same chemical formula but rotate the polarization in opposite directions. Corn syrup is one of the most commonly used sugar solutions [2].

Two simple sugar-water solutions were prepared and used in the experiment. The first solution was made from one cup of table sugar and one cup of water. Table sugar is sucrose, which is dextrorotatory, turning the plane of polarization clockwise (to the right). The second solution mixes grape sugar (Traubenzucker), i.e. glucose, which is also dextrorotatory, with the same amount of water; it likewise rotates the polarization of the incoming light to the right. However, because of their different molecular structures the two solutions rotate the polarization by different amounts and therefore show different colours between the crossed polarizers.
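
The amount of rotation is commonly summarized by the specific-rotation relation of polarimetry (added here for reference; the symbols are not defined in the original text):

\alpha = [\alpha]_\lambda^{T}\, l\, c,

where \alpha is the observed rotation angle, [\alpha]_\lambda^{T} the specific rotation of the dissolved substance at temperature T and wavelength \lambda, l the path length through the solution (in dm), and c the concentration (in g/mL). A longer or more concentrated sugar column therefore rotates the polarization further.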


In the image, the table sugar-water solution is shown in the left glass and the grape sugar-water solution in the right glass.

    Parts

    Modules for this setup

Name | Properties | Price | Link | #
4×1 Baseplate | Skeleton of the System | 5.47 € | Base-plate | 1
MODULE: Polarizer Cube | It holds the linearly polarizing filter | 8.62 € | Linear Polarizer | 2
MODULE: Active Solution Chamber | It contains the sugar-water solutions | 7.32 € | Active Solution Chambers | 1
EXTRA MODULE: Flashlight Lamp Cube | Light Source | 7.2 € | Flashlight | 1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 24 × 5 mm Ball magnets 🢂
    • 20 x Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 6 x Screws DIN912 ISO 4762 M2×16 mm 🢂
    • 2 x metal plates
    • Microscope Rectangular Coverslips 🢂
    • NOT USED 1 × flashlight 🢂

    Assembly

    Results

We printed and assembled the two Linear Polarizer modules and the Active Solution Chamber module. Then we bought the necessary components and inserted them into the cubes.
You can see the details of the Active Solution Chamber designs.

Two different chamber designs are shown in the image below. The left chamber has a container for only one active solution; in the other, two different mixtures can be observed at the same time.

You can find the basic version of the Polarization Using an Optically Active Solution experiment with an additional flashlight source below. Depending on the conditions at the experiment location, you can add an extra light source.

The chamber module was inserted between the two linear polarizers, i.e. the crossed polarizers. The direct effect of the angle between the two linear polarizers can be observed in the video below: the intensity of the light passing through the crossed polarizers changes as one polarization filter is rotated through 360 degrees.

The experimental result for the two optically active solutions is shown in the video:

• Left: grape sugar-water solution (glucose)
• Right: table sugar-water solution (sucrose)

    References

    [1] Gal, J. (2017). Pasteur and the art of chirality. Nature Chemistry, 9(7), 604–605. https://doi.org/10.1038/nchem.2790

    [2] Logiurato, F. (2018). Teaching Light Polarization by Putting Art and Physics Together. The Physics Teachers, 1–5. https://arxiv.org/ftp/arxiv/papers/1803/1803.09645.pdf

    New Ideas

We are open to new ideas. Please contribute to the project freely, this place is a free country which is built by codes and machines :robot:

Birefringence is the optical property of a material whose refractive index depends on the polarization and propagation direction of light. Such optically anisotropic materials are said to be birefringent.

In general, birefringence means double refraction of light. Light entering a birefringent material such as a calcite crystal is split by polarization into two rays, an ordinary and an extraordinary ray, which follow different paths and have different polarizations.

Common birefringent materials are:

* crystals, which are the best-characterized birefringent materials
* many plastics under mechanical stress, such as cellophane or plastic boxes
* many biological materials, such as collagen (found in cartilage, tendon, bone, corneas, and several other parts of the body) and some proteins
Polarized-light microscopy is therefore commonly used on biological tissue.

Birefringence is used in many optical and medical devices. In medical applications, it can be used for the measurement of the optic nerve fiber thickness or the diagnosis of glaucoma.

    Well then, what is the connection with polarization?

Let's think. You ordered a new T-shirt from Amazon. You tried it on and liked it. How beautiful! But wait: you can use the plastic shipping package for a polarization experiment and easily demonstrate the stress birefringence of a plastic sheet. Yesss, you can do science using 'garbage' too.
    Let's look at that more closely!

Polarizers are frequently used to detect stress-induced birefringence in plastics. In this experiment we use basic materials from daily life as samples and see birefringence with the naked eye. Let's collect simple objects such as plastic boxes, plastic cutlery (Image 1), or plastic packaging. We can even prepare our own birefringent object (Image 2) using a plastic punched pocket and sticky tape.

    Image 1 :

    Image 2:

    Stress Birefringence

Stress birefringence results from stressing or deforming isotropic materials: the applied stress causes a loss of physical isotropy and generates birefringence.

How can the stress be applied? It can be applied externally, or it can be frozen into the part during manufacturing, for example when a plastic object is injection molded and then cooled.
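
The underlying relation is the stress-optic (photoelastic) law, quoted here for reference; the quantities C, \sigma_1, \sigma_2, and t are not given in the original text:

\Delta n = C\,(\sigma_1 - \sigma_2), \qquad \delta = \frac{2\pi\,t}{\lambda}\,\Delta n,

where C is the stress-optic coefficient of the material, \sigma_1 - \sigma_2 the difference of the principal stresses, t the sample thickness, and \delta the resulting phase retardation between the two polarization components. Because \delta depends on \lambda, white light produces the coloured patterns seen between the crossed polarizers.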

    Parts

    Modules for this setup

Name | Properties | Price | Link | #
4×1 Baseplate | Skeleton of the System | 5.47 € | Base-plate | 1
MODULE: Polarizer Cube | It holds the linearly polarizing filter | 8.62 € | Linear Polarizer | 2
MODULE: Sample Holder Cube | It holds the Birefringent Samples | 3.47 € | Sample Holder | 1
EXTRA MODULE: Screen Holder Cube | It holds the Display Screen (Not Used in Practice) | 1 € | Screen | 1
EXTRA MODULE: Flashlight Lamp Cube | Light Source | 7.2 € | Flashlight | 1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 16 × 5 mm Ball magnets 🢂
    • 24 x Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 6 x Screws DIN912 ISO 4762 M2×16 mm 🢂
    • Birefringent Materials or Samples
    • NOT USED 1 × flashlight 🢂

    Assembly

If all the listed modules are used in the experiment, the setup will look like this:

    Results

You will find the basic version of the Stress Birefringence experiment, without an extra light source and sample holder, below.
A sample is placed between the crossed polarizers in the setup, and colour patterns can be observed clearly. The polarization of a light ray is altered after passing through a birefringent material, and the amount of this change depends on the wavelength, which is what produces the colours.

The printed cube parts were assembled, and result images were taken for three different birefringent materials.

    In the 1st Experiment, we prepared our sample using a plastic punched pocket and randomly applied sticky tape on it.

    In the 2nd Experiment, we used a plastic piece as a sample for the setup.

    In the 3rd Experiment, plastic cutlery was used.

    New Ideas

Rat-Tat! We are here to hear new ideas. Please don't be shy and have a heart-to-heart talk with us. 💝

Some light will pass through the three polarizers if we add a third polarizer between these two crossed polarizers.

In the Three Linear Polarizers setup, the amount of light passing through the polarizers can be calculated with the Law of Malus, the cosine-squared law.

I: the intensity of the light passing through the polarizers (the total amount transmitted by the three-polarizer setup)
I_0: the intensity of the incoming light
θ: the angle between the transmission axes of two successive polarizers

The polarization direction of the first polarizer is oriented vertically with respect to the incoming beam, at 0 degrees. The incoming unpolarized light passes through this first polarizer and becomes linearly (vertically, 's') polarized. The vertically polarized light then travels to the second linear polarizer, which is rotated by 45 degrees with respect to the first. Finally, the light passes through the third polarizer ('p'-oriented), tilted by 90 degrees with respect to the first. Because of the orientation angle of each linear polarizer, the transmitted light intensity changes according to the Law of Malus.
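
Applying Malus's law to this 0°-45°-90° sequence (for an unpolarized input of intensity I_0, half of which passes the first polarizer) gives

I = \frac{I_0}{2}\,\cos^2(45^\circ)\,\cos^2(45^\circ) = \frac{I_0}{8},

so roughly one eighth of the incoming intensity is transmitted, even though the first and third polarizers alone would block the light completely.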

Three linear polarizers are used in the experiment, each with a different transmission-axis angle.

    Parts

    Modules for this setup

Name | Properties | Price | Link | #
4×1 Baseplate | Skeleton of the System | 5.47 € | Base-plate | 1
MODULE: Polarizer Cube | It holds the linearly polarizing filter | 13.43 € | Linear Polarizer | 3
EXTRA MODULE: Sample Holder Cube | It holds the Sample (Not Used in Practice) | 1.3 € | Sample Holder | 1
EXTRA MODULE: Screen Holder Cube | It holds the Display Screen (Not Used in Practice) | 1 € | Screen | 1
EXTRA MODULE: Flashlight Lamp Cube | Light Source | 7.2 € | Flashlight | 1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 16 × 5 mm Ball magnets 🢂
    • 24 x Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 9 x Screws DIN912 ISO 4762 M2×16 mm 🢂
    • NOT USED 1 × flashlight 🢂

If all the listed modules are used in the experiment, the setup will look like this:

    Assembly

    Results

The basic version of the Three Polarizers experiment, without a specific sample or extra light source, is demonstrated below. You can see the experiment images below.

The effect of the angle between two linear polarizers can be seen in the video below. The intensity of the light reaching the observer's eye through the polarizers changes when the wheel insert of the polarization filter is rotated in 45-degree steps.

    New Ideas

    We are open to new idea source (dad joke about the open-source project 😐 ). Just open a new issue and spread your idea!


    Educational Kits

    CoreBox: Entry-Level Education Box

     - Features and Specifications
    - Assembling the CoreBox
    - Core Lens, Telescope, and Microscope

    Discovery Kit: Extension of CoreBox

     - Adding Modules to the Discovery Kit
    - Enhanced Functionality

    Interferometer Kit

 - You can build a Michelson Interferometer
 - Try enhancing it to become a Mach-Zehnder microscope
 - Ultimately, test the microscope extension and reconstruct images using holography

    UC2 Microscopy Building Workshop at BioRTC Yobe University, Nigeria

    Welcome to the UC2 Microscopy Workshop! 📷🔬

    If you've ever been curious about the fascinating world of microscopy, you're in the right place! In this workshop, we will take you on a journey through the core concepts of microscopy, starting with lenses and interferometry, where you'll learn how different waves superpose to create powerful imaging techniques.

    Our approach centers around the open-source modular toolbox, UC2. This revolutionary system is built on the idea that every optical, mechanical, or electrical component can be mounted inside a compact 50mm cube. With a wide variety of components already available in our extensive library, you'll have the flexibility to design and build your own optical setups, limited only by your creativity.

    We'll kick off the workshop with the fully lensless microscope, utilizing just an LED, spatial filter, sample, and camera sensor. As we progress, you'll upgrade to a finite corrected objective lens, improving the resolution and focusing capabilities on the camera chip. We'll explore different microscopy techniques, including directional microscopy and light sheet microscopy, where the alignment of light enhances optical resolution along the axis.

    The heart of our workshop is the UC2-produced microscope, aptly named "sub." Although basic, it is the perfect tool to grasp the fundamental concepts of microscopy. From there, the possibilities are limitless as you delve into designing and printing specific inserts to adapt the system for your experiments.

The UC2 system was born out of a quest for a small, affordable microscope for live-cell imaging inside an incubator. As it evolved, we expanded its modularity, adding different contrast mechanisms and extensions like fluorescence and more. The success of this open-source initiative has been demonstrated through various publications, showcasing its applications in structured illumination microscopy, confocal microscopy, and beyond.

Our mission is to bridge the gap between education and real-world applications, providing a platform where anyone, regardless of experience, can get creative with optics. We strive to make microscopy accessible and affordable for all, and we are excited to announce the birth of our company, now headquartered in Jena, as we embark on a journey to revolutionize microscopy.

    So, if you're ready to dive into the world of microscopy, join us in this workshop as we build and enhance simple microscopes, bring them to life with software and image processing, and unlock the incredible potential of UC2 and open-source hardware.

    Let's embark on this adventure together! Happy exploring! 🚀✨

    Inline Holographic Microscope:

    Simple SEEED ESP32S3 Xiao Sense-based microscope:

    Michelson Interferometer:

    Light-sheet microscope:


    openUC2 Workshops

From time to time we try to share our knowledge in various formats. If you want to learn more about how you can have an openUC2 workshop near you, please contact us! We would be happy to introduce you to the world of open optics.


    openUC2 Documentation

Here you can find all the information you need to enhance, repair, improve, use, and communicate about our optical toolbox openUC2. Didn't find what you were looking for? No problem. Send us a mail or open an issue in our GitHub repository: https://github.com/openUC2/UC2-GIT/issues.

    Introduction into the openUC2 toolbox

    • Overview of openUC2
    • Purpose and Scope of the Toolbox
    • Key Features and Components
    • Getting Started Guide

    openUC2 Documentation

    Seeing is believing. But better with the docs!

Learning Kits (Explorer/Discovery)

Step by step guides to learn everything about optics.

Cutting the Edge! (Investigator)

Get the most of your ready-to-use microscopes.

Anything else.

Anything that is yet missing.


    Markdown page example

    You don't need React to write simple standalone pages.

//docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/ENGLISH/coreMicroscopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreTelescopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/ENGLISH/picturedTutorialweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxENweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/CoreIntroweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/CoreLinseweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/CoreMikroskopweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/CoreTeleskopweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/Opticsintroweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/Smartphone%20Microscopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/SPANISH/core_introweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_InfOptics/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/MicroscopyCoreweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/Automation_introweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/Camera%20Setupweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/seeedmicroscopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/spectrometerweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/XYZ_stage_micoweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryFluorescence/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/InlineHolographyweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/Interferometer_introweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometerweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/MichelsonInterferometerweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorialweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Brewster_Angle_Experiment/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Circular_Polarizer/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Crossed_Polarizers/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Many_Microscope_Slides_Experiment/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Newtons_Rings_Experiment/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Polarization_using_optically_active_solution/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Stress_Birefringence/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Three_Polarizers/weekly0.5https://docs.youseetoo.org/docs/WORKSHOPS/weekly0.5https://docs.youseetoo.org/docs/WORKSHOPS/Workshop%20Nigeriaweekly0.5https://docs.youseetoo.org/weekly0.5 \ No newline at end of file
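For reference, each "URL + weekly + 0.5" run in the generated file corresponds to one entry of the standard sitemap protocol. A representative entry, reconstructed from the values above (the URL shown is one of the newly added pages; the exact whitespace is illustrative, since Docusaurus emits the whole sitemap on a single line), would look roughly like this:

  <!-- one entry per page of the site -->
  <url>
    <loc>https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometer</loc>
    <changefreq>weekly</changefreq>
    <priority>0.5</priority>
  </url>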