diff --git a/404.html b/404.html index e34737f17..412c75a9d 100644 --- a/404.html +++ b/404.html @@ -10,13 +10,13 @@ - - + +
Skip to main content

Page Not Found

We could not find what you were looking for.

Please contact the owner of the site that linked you to the original URL and let them know their link is broken.

- - + + \ No newline at end of file diff --git a/assets/images/trackpen-9cf48bfcc00e098078e888fd3f448fa2.gif b/assets/images/trackpen-9cf48bfcc00e098078e888fd3f448fa2.gif new file mode 100644 index 000000000..f73bc7590 Binary files /dev/null and b/assets/images/trackpen-9cf48bfcc00e098078e888fd3f448fa2.gif differ diff --git a/assets/js/27c3a159.9c12ed4b.js b/assets/js/27c3a159.9c12ed4b.js new file mode 100644 index 000000000..122d9181e --- /dev/null +++ b/assets/js/27c3a159.9c12ed4b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkuc_2_docs=self.webpackChunkuc_2_docs||[]).push([[6938],{3905:(e,n,t)=>{t.d(n,{Zo:()=>c,kt:()=>g});var i=t(67294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);n&&(i=i.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,i)}return t}function a(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var l=i.createContext({}),p=function(e){var n=i.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):a(a({},n),e)),t},c=function(e){var n=p(e.components);return i.createElement(l.Provider,{value:n},e.children)},m={inlineCode:"code",wrapper:function(e){var n=e.children;return i.createElement(i.Fragment,{},n)}},u=i.forwardRef((function(e,n){var t=e.components,o=e.mdxType,r=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),u=p(t),g=o,d=u["".concat(l,".").concat(g)]||u[g]||m[g]||r;return t?i.createElement(d,a(a({ref:n},c),{},{components:t})):i.createElement(d,a({ref:n},c))}));function g(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var r=t.length,a=new Array(r);a[0]=u;var s={};for(var l in 
n)hasOwnProperty.call(n,l)&&(s[l]=n[l]);s.originalType=e,s.mdxType="string"==typeof e?e:o,a[1]=s;for(var p=2;p{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>a,default:()=>m,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var i=t(87462),o=(t(67294),t(3905));const r={},a="Smart Microscopy Using openUC2 and ImSwitch",s={unversionedId:"Investigator/XYZMicroscope/SmartMicroscopy",id:"Investigator/XYZMicroscope/SmartMicroscopy",title:"Smart Microscopy Using openUC2 and ImSwitch",description:"This tutorial will guide you through setting up a smart microscopy workflow using the openUC2 microscope and the ImSwitch software. We will perform a closed-loop experiment where the microscope follows a line based on image processing results.",source:"@site/docs/02_Investigator/02_XYZMicroscope/SmartMicroscopy.md",sourceDirName:"02_Investigator/02_XYZMicroscope",slug:"/Investigator/XYZMicroscope/SmartMicroscopy",permalink:"/docs/Investigator/XYZMicroscope/SmartMicroscopy",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"openUC2 Setting up the tube lens",permalink:"/docs/Investigator/XYZMicroscope/SetupTubelens"},next:{title:"Stage Mapping and Stage Calibration",permalink:"/docs/Investigator/XYZMicroscope/StageCalibration"}},l={},p=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Closed-Loop Feedback Pipeline",id:"closed-loop-feedback-pipeline",level:2},{value:"Installation",id:"installation",level:3},{value:"Code Implementation",id:"code-implementation",level:3},{value:"Result",id:"result",level:3}],c={toc:p};function m(e){let{components:n,...r}=e;return(0,o.kt)("wrapper",(0,i.Z)({},c,r,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h1",{id:"smart-microscopy-using-openuc2-and-imswitch"},"Smart Microscopy Using openUC2 and ImSwitch"),(0,o.kt)("p",null,"This tutorial will guide you through setting up a smart microscopy workflow using the openUC2 microscope and the ImSwitch software. 
We will perform a closed-loop experiment where the microscope follows a line based on image processing results."),(0,o.kt)("h2",{id:"prerequisites"},"Prerequisites"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("strong",{parentName:"li"},"ImSwitch Software"),": Ensure that ImSwitch is running and accessible. For example, if running on the same computer, the URL might be ",(0,o.kt)("inlineCode",{parentName:"li"},"https://localhost:8002")," (check logs for the exact port)."),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("strong",{parentName:"li"},"SSL Certificate"),": Access the REST API (e.g., https://localhost:8002/docs) in a browser and accept the security warning to use the web viewer (",(0,o.kt)("a",{parentName:"li",href:"https://youseetoo.github.io/imswitch/index.html"},"https://youseetoo.github.io/imswitch/index.html"),"). Enter the URL and port under connections.")),(0,o.kt)("h2",{id:"closed-loop-feedback-pipeline"},"Closed-Loop Feedback Pipeline"),(0,o.kt)("p",null,"The pipeline will:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Snap an image"),(0,o.kt)("li",{parentName:"ol"},"Create a background image"),(0,o.kt)("li",{parentName:"ol"},"Subtract the background"),(0,o.kt)("li",{parentName:"ol"},"Compute edges using the Canny filter"),(0,o.kt)("li",{parentName:"ol"},"Perform Hough transform to find straight lines"),(0,o.kt)("li",{parentName:"ol"},"Determine the mean orientation of the lines"),(0,o.kt)("li",{parentName:"ol"},"Compute the next XY coordinate to move"),(0,o.kt)("li",{parentName:"ol"},"Return to the initial position")),(0,o.kt)("h3",{id:"installation"},"Installation"),(0,o.kt)("p",null,"Install the necessary package:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-sh"},"pip install https://github.com/openUC2/imswitchclient/archive/refs/heads/main.zip\n")),(0,o.kt)("h3",{id:"code-implementation"},"Code Implementation"),(0,o.kt)("p",null,"You can run the following code in a Jupyter notebook or Visual 
Studio Code. Adjust the client initialization to match your setup."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'# Load dependencies\nimport cv2\nimport numpy as np\nimport tifffile as tif\nimport matplotlib.pyplot as plt\nimport os\nimport imswitchclient.ImSwitchClient as imc\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport time\nfrom simple_pid import PID\n\n# Setup PID controller\ncontroller = PID(2, 0.1, 2)\ncontroller.send(None)\n\n# Initialize the client\nclient = imc.ImSwitchClient(host="192.168.137.1", port=8002)\n\n# Retrieve the first positioner\'s name and current position\npositioner_names = client.positionersManager.getAllDeviceNames()\npositioner_name = positioner_names[0]\ncurrentPositions = client.positionersManager.getPositionerPositions()[positioner_name]\ninitialPosition = (currentPositions["X"], currentPositions["Y"])\n\n# Loop through the process\nfor iimage in range(10):\n # Snap image\n scalingFactor = .5\n pixel_to_stage = 1 / scalingFactor\n gaussianKernel = 201\n print("Taking image")\n iImage = client.recordingManager.snapNumpyToFastAPI(scalingFactor)\n mCrop = np.max(iImage.shape)\n Ny, Nx = iImage.shape\n\n # Remove background\n mBackground = cv2.GaussianBlur(iImage, (gaussianKernel, gaussianKernel), 0)\n iImage = iImage / mBackground\n iImage = iImage[Nx//2-mCrop:Nx//2+mCrop, Ny//2-mCrop:Ny//2+mCrop]\n\n # Process image\n image = np.uint8(iImage * 255)[:, :, np.newaxis]\n image[image > 100] = 0\n edges = cv2.Canny(image, 50, 150, apertureSize=3)\n lines = cv2.HoughLines(edges, 1, np.pi / 180, 100)\n\n # Calculate main orientation\n angles = [np.degrees(theta) for rho, theta in lines[:, 0]] if lines is not None else []\n main_orientation = np.mean(angles)\n dy = np.cos(np.radians(main_orientation)) * Nx / 2\n dx = np.sin(np.radians(main_orientation)) * Ny / 2\n\n # Handle NaN values\n dx = dx if not np.isnan(dx) else np.random.randint(-100, 100)\n dy = dy if not np.isnan(dy) else 
np.random.randint(-100, 100)\n\n newPosition = (dx * pixel_to_stage, dy * pixel_to_stage)\n print(f"We are moving the microscope in x:/y: {round(newPosition[0], 2)} / {round(newPosition[1], 2)}")\n\n client.positionersManager.movePositioner(positioner_name, "X", newPosition[0], is_absolute=False, is_blocking=True)\n client.positionersManager.movePositioner(positioner_name, "Y", newPosition[1], is_absolute=False, is_blocking=True)\n\n# Return to the initial position\nclient.positionersManager.movePositioner(positioner_name, "X", initialPosition[0], is_absolute=True, is_blocking=True)\nclient.positionersManager.movePositioner(positioner_name, "Y", initialPosition[1], is_absolute=True, is_blocking=True)\n')),(0,o.kt)("h3",{id:"result"},"Result"),(0,o.kt)("p",null,"The microscope will follow a line for 10 steps and then return to the initial position."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"Result",src:t(22891).Z,width:"800",height:"518"})),(0,o.kt)("p",null,"This workflow demonstrates a basic smart microscopy setup using openUC2 and ImSwitch, allowing for closed-loop experiments based on real-time image processing. 
Adapt and expand this pipeline for your specific experiments and applications."))}m.isMDXComponent=!0},22891:(e,n,t)=>{t.d(n,{Z:()=>i});const i=t.p+"assets/images/trackpen-9cf48bfcc00e098078e888fd3f448fa2.gif"}}]); \ No newline at end of file diff --git a/assets/js/608ae6a4.36566353.js b/assets/js/608ae6a4.b4fe9852.js similarity index 75% rename from assets/js/608ae6a4.36566353.js rename to assets/js/608ae6a4.b4fe9852.js index 947701d70..1e9d07532 100644 --- a/assets/js/608ae6a4.36566353.js +++ b/assets/js/608ae6a4.b4fe9852.js @@ -1 +1 @@ -"use strict";(self.webpackChunkuc_2_docs=self.webpackChunkuc_2_docs||[]).push([[6938],{84545:s=>{s.exports=JSON.parse('{"permalink":"/blog/tags/docusaurus","page":1,"postsPerPage":10,"totalPages":1,"totalCount":4,"blogDescription":"Blog","blogTitle":"Blog"}')}}]); \ No newline at end of file +"use strict";(self.webpackChunkuc_2_docs=self.webpackChunkuc_2_docs||[]).push([[3011],{84545:s=>{s.exports=JSON.parse('{"permalink":"/blog/tags/docusaurus","page":1,"postsPerPage":10,"totalPages":1,"totalCount":4,"blogDescription":"Blog","blogTitle":"Blog"}')}}]); \ No newline at end of file diff --git a/assets/js/7f6a1755.5d17abb6.js b/assets/js/7f6a1755.5d17abb6.js new file mode 100644 index 000000000..30cfebd65 --- /dev/null +++ b/assets/js/7f6a1755.5d17abb6.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkuc_2_docs=self.webpackChunkuc_2_docs||[]).push([[822],{3905:(e,t,a)=>{a.d(t,{Zo:()=>o,kt:()=>g});var n=a(67294);function s(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function r(e){for(var t=1;t=0||(s[a]=e[a]);return s}(e,t);if(Object.getOwnPropertySymbols){var 
i=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(s[a]=e[a])}return s}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):r(r({},t),e)),a},o=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},A=n.forwardRef((function(e,t){var a=e.components,s=e.mdxType,i=e.originalType,p=e.parentName,o=m(e,["components","mdxType","originalType","parentName"]),A=l(a),g=s,h=A["".concat(p,".").concat(g)]||A[g]||c[g]||i;return a?n.createElement(h,r(r({ref:t},o),{},{components:a})):n.createElement(h,r({ref:t},o))}));function g(e,t){var a=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var i=a.length,r=new Array(i);r[0]=A;var m={};for(var p in t)hasOwnProperty.call(t,p)&&(m[p]=t[p]);m.originalType=e,m.mdxType="string"==typeof e?e:s,r[1]=m;for(var l=2;l{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>r,default:()=>c,frontMatter:()=>i,metadata:()=>m,toc:()=>l});var n=a(87462),s=(a(67294),a(3905));const i={},r="Stage Mapping and Stage Calibration",m={unversionedId:"Investigator/XYZMicroscope/StageCalibration",id:"Investigator/XYZMicroscope/StageCalibration",title:"Stage Mapping and Stage Calibration",description:"Stage Coordinates",source:"@site/docs/02_Investigator/02_XYZMicroscope/StageCalibration.md",sourceDirName:"02_Investigator/02_XYZMicroscope",slug:"/Investigator/XYZMicroscope/StageCalibration",permalink:"/docs/Investigator/XYZMicroscope/StageCalibration",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"Smart Microscopy Using openUC2 and ImSwitch",permalink:"/docs/Investigator/XYZMicroscope/SmartMicroscopy"},next:{title:"Stage Scanning and Image Stitching (ASHLAR)",permalink:"/docs/Investigator/XYZMicroscope/StageScanning"}},p={},l=[{value:"Stage 
Coordinates",id:"stage-coordinates",level:2},{value:"Alignment of Axes",id:"alignment-of-axes",level:3},{value:"Understanding Axes in NumPy",id:"understanding-axes-in-numpy",level:3},{value:"Stage Coordinate System",id:"stage-coordinate-system",level:3},{value:"Aligning Axes with Imswitch",id:"aligning-axes-with-imswitch",level:3},{value:"Steps for Aligning Axes:",id:"steps-for-aligning-axes",level:4},{value:"Stage Calibration",id:"stage-calibration",level:2},{value:"Interpreation of the Matrix",id:"interpreation-of-the-matrix",level:2},{value:"Calibration Matrix image_to_stage_displacement",id:"calibration-matrix-image_to_stage_displacement",level:3},{value:"Entries and Their Names",id:"entries-and-their-names",level:4},{value:"Summary of the Matrix Entries",id:"summary-of-the-matrix-entries",level:3},{value:"Interpretation of the Values",id:"interpretation-of-the-values",level:2},{value:"1. image_to_stage_displacement Matrix",id:"1-image_to_stage_displacement-matrix",level:3},{value:"2. backlash_vector Matrix",id:"2-backlash_vector-matrix",level:3}],o={toc:l};function c(e){let{components:t,...i}=e;return(0,s.kt)("wrapper",(0,n.Z)({},o,i,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h1",{id:"stage-mapping-and-stage-calibration"},"Stage Mapping and Stage Calibration"),(0,s.kt)("h2",{id:"stage-coordinates"},"Stage Coordinates"),(0,s.kt)("p",null,"In this tutorial, we will guide you through the process of aligning the coordinate systems for the UC2 microscope stage. Proper alignment ensures that the movement of the stage corresponds accurately with the image displayed on the screen, facilitating an intuitive user experience. In principle all of this can be handled in software (e.g. 
flipping the camera image, changing stage axis), but it's always good to start with a common ground from the hardware side."),(0,s.kt)("p",null,(0,s.kt)("img",{src:a(14006).Z,width:"1792",height:"948"}),"\n",(0,s.kt)("em",{parentName:"p"},"This is the microscope (UC2 XYZ v3) with the ingredients controlled by ImSwitch")),(0,s.kt)("h3",{id:"alignment-of-axes"},"Alignment of Axes"),(0,s.kt)("p",null,"The goal of aligning the coordinate systems is to ensure they are correctly matched. The alignment of the stage is considered from the origin point (zero point). The desired behavior is as follows:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},"When the stage moves to the right (x+), the image on the screen should also move to the right."),(0,s.kt)("li",{parentName:"ul"},"When the stage moves upwards (y+), the image on the screen should move upwards as well.")),(0,s.kt)("p",null,(0,s.kt)("img",{src:a(95223).Z,width:"3217",height:"751"})),(0,s.kt)("p",null,"This is illustrated in the following Figure. When viewing the sample from above with the microscope positioned in front, the image should match what is shown in Imswitch.\nThis is also represented by the ",(0,s.kt)("inlineCode",{parentName:"p"},"VirtualMicroscope")," with the ",(0,s.kt)("inlineCode",{parentName:"p"},"VirtualStage")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"VirtualCamera")," in this config (Config: ",(0,s.kt)("a",{parentName:"p",href:"https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json"},"https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json"),")."),(0,s.kt)("h3",{id:"understanding-axes-in-numpy"},"Understanding Axes in NumPy"),(0,s.kt)("p",null,"It's important to note the labeling of axes. In NumPy, x = 1 and y = 0. 
This means:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},"The x-axis is the second axis (index 1) of an array."),(0,s.kt)("li",{parentName:"ul"},"The y-axis is the first axis (index 0) of an array.")),(0,s.kt)("p",null,(0,s.kt)("img",{src:a(55828).Z,width:"1272",height:"652"})),(0,s.kt)("p",null,"NumPy arrays are multidimensional, with axes numbered as follows:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},"Axis 0 is the first axis (often the vertical direction)."),(0,s.kt)("li",{parentName:"ul"},"Axis 1 is the second axis (often the horizontal direction).")),(0,s.kt)("h3",{id:"stage-coordinate-system"},"Stage Coordinate System"),(0,s.kt)("p",null,"When viewing the stage from above, the coordinate system is arranged as follows:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"X-Axis (Horizontal)"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Positive direction: Right"),(0,s.kt)("li",{parentName:"ul"},"Negative direction: Left"))),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Y-Axis (Vertical)"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Positive direction: Up"),(0,s.kt)("li",{parentName:"ul"},"Negative direction: Down")))),(0,s.kt)("p",null,(0,s.kt)("img",{src:a(6161).Z,width:"857",height:"646"})),(0,s.kt)("h3",{id:"aligning-axes-with-imswitch"},"Aligning Axes with Imswitch"),(0,s.kt)("p",null,'To enable intuitive operation, the stage and camera axes must be correctly aligned with the coordinate system in Imswitch. To achieve this, the commands "flip x" and "flip y" are used. 
These commands invert the direction of the axes in the coordinate system, meaning that movement or position along the axes is reversed.'),(0,s.kt)("h4",{id:"steps-for-aligning-axes"},"Steps for Aligning Axes:"),(0,s.kt)("ol",null,(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Initial Setup:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Ensure the microscope and stage are properly connected to the control software (e.g., Imswitch)."))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Define Origin:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Identify the origin (zero point) of the stage coordinate system. (in Hardware this would be defined by the Endstops that are used for homing the axes; The motor will run - if the direction is set correctly - until it hits the switch)"))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Test Movement:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Move the stage to the right and observe the direction of the image on the screen.",(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},'If the image moves left, apply the "flip x" command.'))),(0,s.kt)("li",{parentName:"ul"},"Move the stage upwards and observe the direction of the image on the screen.",(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},'If the image moves down, apply the "flip y" command.'))))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Adjust Axes:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("p",{parentName:"li"},"Use the following commands as needed to align the axes:"),(0,s.kt)("pre",{parentName:"li"},(0,s.kt)("code",{parentName:"pre",className:"language-python"},"# Flip the x-axis if necessary\nif x_movement_incorrect:\n stage.flip_x()\n\n# 
Flip the y-axis if necessary\nif y_movement_incorrect:\n stage.flip_y()\n"))))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Verify Alignment:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"After applying the flips, verify that the stage movements correspond correctly with the image movements on the screen."))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Save Configuration:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Save the configuration settings to ensure the alignment persists across sessions.")))),(0,s.kt)("h2",{id:"stage-calibration"},"Stage Calibration"),(0,s.kt)("p",null,"Richard Bowman and his team provided a very nice way to calibrate stage coordinates to camera pixel coordinates. We burtally integrated the open-source software which you can find here: ",(0,s.kt)("a",{parentName:"p",href:"https://gitlab.com/openflexure/microscope-extensions/camera-stage-mapping"},"https://gitlab.com/openflexure/microscope-extensions/camera-stage-mapping")," into ImSwitch. If you activate the ",(0,s.kt)("inlineCode",{parentName:"p"},"HistoScan")," Controller and Widget you can start it either by the GUI or using the HTTP interface by calling http://localhost:8002/HistoScanController/startStageMapping (URL and PORT may differ). What the stage will do is moving a certain series of steps in XY, performs a cross-correlation of the images and computes the shift in XY of the mciroscope image on the camera, compares it to the expected shift on and returns the Image-To-Stage-Displacement Matrix as well as the Backlashvector. Both matrices/vectors are microscope specificand will help you matching e.g. stage coordinates for stitching software such as ASHLAR or OFM Stitching. 
This document should give you a rough idea of what's happening."),(0,s.kt)("p",null,"Some terminology:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Combine X and Y calibrations"),": The calibration involves combining two separate measurements or calibration runs for the x and y directions, ensuring that the directions are orthogonal (at right angles to each other).")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"2x2 transformation matrix"),": The ",(0,s.kt)("inlineCode",{parentName:"p"},"image_to_stage_displacement")," matrix maps image displacements to stage displacements. This ensures that movements in the image coordinate system are accurately translated to movements in the stage coordinate system.")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"backlash_vector"),": This is a vector estimating the backlash (mechanical slack or play in the system) in each direction. In this case, the estimated backlash is zero, indicating a precise calibration with no noticeable mechanical play.")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"backlash"),": The function is expected to return the highest element of ",(0,s.kt)("inlineCode",{parentName:"p"},"backlash_vector")," as a scalar value, which would be zero in this case."))),(0,s.kt)("h2",{id:"interpreation-of-the-matrix"},"Interpreation of the Matrix"),(0,s.kt)("h3",{id:"calibration-matrix-image_to_stage_displacement"},"Calibration Matrix ",(0,s.kt)("inlineCode",{parentName:"h3"},"image_to_stage_displacement")),(0,s.kt)("p",null,"The entries of the calibration matrix ",(0,s.kt)("inlineCode",{parentName:"p"},"image_to_stage_displacement")," can be given specific names and meanings based on their positions within the matrix. 
Let's denote the matrix as follows:"),(0,s.kt)("div",{className:"math math-display"},(0,s.kt)("span",{parentName:"div",className:"katex-display"},(0,s.kt)("span",{parentName:"span",className:"katex"},(0,s.kt)("span",{parentName:"span",className:"katex-mathml"},(0,s.kt)("math",{parentName:"span",xmlns:"http://www.w3.org/1998/Math/MathML",display:"block"},(0,s.kt)("semantics",{parentName:"math"},(0,s.kt)("mrow",{parentName:"semantics"},(0,s.kt)("mtext",{parentName:"mrow"},"image_to_stage_displacement"),(0,s.kt)("mo",{parentName:"mrow"},"="),(0,s.kt)("mrow",{parentName:"mrow"},(0,s.kt)("mo",{parentName:"mrow",fence:"true"},"("),(0,s.kt)("mtable",{parentName:"mrow",rowspacing:"0.16em",columnalign:"center center",columnspacing:"1em"},(0,s.kt)("mtr",{parentName:"mtable"},(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mn",{parentName:"mstyle"},"0"))),(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mrow",{parentName:"mstyle"},(0,s.kt)("mo",{parentName:"mrow"},"\u2212"),(0,s.kt)("mn",{parentName:"mrow"},"1.0"))))),(0,s.kt)("mtr",{parentName:"mtable"},(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mrow",{parentName:"mstyle"},(0,s.kt)("mo",{parentName:"mrow"},"\u2212"),(0,s.kt)("mn",{parentName:"mrow"},"1.0")))),(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mn",{parentName:"mstyle"},"0"))))),(0,s.kt)("mo",{parentName:"mrow",fence:"true"},")"))),(0,s.kt)("annotation",{parentName:"semantics",encoding:"application/x-tex"},"\\text{image\\_to\\_stage\\_displacement} = \\begin{pmatrix} 0 & -1.0 \\\\ -1.0 & 0 
\\end{pmatrix}")))),(0,s.kt)("span",{parentName:"span",className:"katex-html","aria-hidden":"true"},(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"1.0044em",verticalAlign:"-0.31em"}}),(0,s.kt)("span",{parentName:"span",className:"mord text"},(0,s.kt)("span",{parentName:"span",className:"mord"},"image_to_stage_displacement")),(0,s.kt)("span",{parentName:"span",className:"mspace",style:{marginRight:"0.2778em"}}),(0,s.kt)("span",{parentName:"span",className:"mrel"},"="),(0,s.kt)("span",{parentName:"span",className:"mspace",style:{marginRight:"0.2778em"}})),(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"2.4em",verticalAlign:"-0.95em"}}),(0,s.kt)("span",{parentName:"span",className:"minner"},(0,s.kt)("span",{parentName:"span",className:"mopen delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size3"},"(")),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mtable"},(0,s.kt)("span",{parentName:"span",className:"col-align-c"},(0,s.kt)("span",{parentName:"span",className:"vlist-t 
vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"1.45em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-3.61em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord"},"0"))),(0,s.kt)("span",{parentName:"span",style:{top:"-2.41em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord"},"\u2212"),(0,s.kt)("span",{parentName:"span",className:"mord"},"1.0")))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.95em"}},(0,s.kt)("span",{parentName:"span"}))))),(0,s.kt)("span",{parentName:"span",className:"arraycolsep",style:{width:"0.5em"}}),(0,s.kt)("span",{parentName:"span",className:"arraycolsep",style:{width:"0.5em"}}),(0,s.kt)("span",{parentName:"span",className:"col-align-c"},(0,s.kt)("span",{parentName:"span",className:"vlist-t 
vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"1.45em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-3.61em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord"},"\u2212"),(0,s.kt)("span",{parentName:"span",className:"mord"},"1.0"))),(0,s.kt)("span",{parentName:"span",style:{top:"-2.41em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord"},"0")))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.95em"}},(0,s.kt)("span",{parentName:"span"}))))))),(0,s.kt)("span",{parentName:"span",className:"mclose delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size3"},")")))))))),(0,s.kt)("div",{className:"math math-display"},(0,s.kt)("span",{parentName:"div",className:"katex-display"},(0,s.kt)("span",{parentName:"span",className:"katex"},(0,s.kt)("span",{parentName:"span",className:"katex-mathml"},(0,s.kt)("math",{parentName:"span",xmlns:"http://www.w3.org/1998/Math/MathML",display:"block"},(0,s.kt)("semantics",{parentName:"math"},(0,s.kt)("mrow",{parentName:"semantics"},(0,s.kt)("mo",{parentName:"mrow",fence:"true"},"("),(0,s.kt)("mtable",{parentName:"mrow",rowspacing:"0.16em",columnalign:"center 
center",columnspacing:"1em"},(0,s.kt)("mtr",{parentName:"mtable"},(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mi",{parentName:"mstyle"},"a"))),(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mi",{parentName:"mstyle"},"b")))),(0,s.kt)("mtr",{parentName:"mtable"},(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mi",{parentName:"mstyle"},"c"))),(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mi",{parentName:"mstyle"},"d"))))),(0,s.kt)("mo",{parentName:"mrow",fence:"true"},")")),(0,s.kt)("annotation",{parentName:"semantics",encoding:"application/x-tex"},"\\begin{pmatrix} a & b \\\\ c & d \\end{pmatrix}")))),(0,s.kt)("span",{parentName:"span",className:"katex-html","aria-hidden":"true"},(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"2.4em",verticalAlign:"-0.95em"}}),(0,s.kt)("span",{parentName:"span",className:"minner"},(0,s.kt)("span",{parentName:"span",className:"mopen delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size3"},"(")),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mtable"},(0,s.kt)("span",{parentName:"span",className:"col-align-c"},(0,s.kt)("span",{parentName:"span",className:"vlist-t vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"1.45em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-3.61em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord 
mathnormal"},"a"))),(0,s.kt)("span",{parentName:"span",style:{top:"-2.41em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal"},"c")))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.95em"}},(0,s.kt)("span",{parentName:"span"}))))),(0,s.kt)("span",{parentName:"span",className:"arraycolsep",style:{width:"0.5em"}}),(0,s.kt)("span",{parentName:"span",className:"arraycolsep",style:{width:"0.5em"}}),(0,s.kt)("span",{parentName:"span",className:"col-align-c"},(0,s.kt)("span",{parentName:"span",className:"vlist-t vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"1.45em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-3.61em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal"},"b"))),(0,s.kt)("span",{parentName:"span",style:{top:"-2.41em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal"},"d")))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.95em"}},(0,s.kt)("span",{parentName:"span"}))))))),(0,s.kt)("span",{parentName:"span",className:"mclose delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size3"},")")))))))),(0,s.kt)("h4",{id:"entries-and-their-names"},"Entries and Their 
Names"),(0,s.kt)("ol",null,(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"a (0)"),":"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Name:")," ",(0,s.kt)("inlineCode",{parentName:"li"},"a")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Meaning:")," Represents the scaling factor from the x-coordinate in the image to the x-coordinate in the stage. Here, it is 0, indicating no direct mapping from image x to stage x."))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"b (-1.0)"),":"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Name:")," ",(0,s.kt)("inlineCode",{parentName:"li"},"b")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Meaning:")," Represents the scaling factor from the y-coordinate in the image to the x-coordinate in the stage. The value -1.0 indicates an inverse and slightly scaled mapping from image y to stage x."))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"c (-1.0)"),":"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Name:")," ",(0,s.kt)("inlineCode",{parentName:"li"},"c")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Meaning:")," Represents the scaling factor from the x-coordinate in the image to the y-coordinate in the stage. 
The value -1.0 indicates an inverse and slightly scaled mapping from image x to stage y."))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"d (0)"),":"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Name:")," ",(0,s.kt)("inlineCode",{parentName:"li"},"d")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Meaning:")," Represents the scaling factor from the y-coordinate in the image to the y-coordinate in the stage. Here, it is 0, indicating no direct mapping from image y to stage y.")))),(0,s.kt)("h3",{id:"summary-of-the-matrix-entries"},"Summary of the Matrix Entries"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"a (0)"),": No direct mapping from image x to stage x."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"b (-1.0)"),": Inverse mapping from image y to stage x."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"c (-1.0)"),": Inverse mapping from image x to stage y."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"d (0)"),": No direct mapping from image y to stage y.")),(0,s.kt)("p",null,"This calibration matrix indicates that there is a transformation involving a 90-degree rotation combined with an inverse scaling factor slightly above 1 between the image coordinates and the stage coordinates. 
The exact interpretation may depend on the specific application, but generally, it implies that movements in one direction in the image are mapped to movements in the perpendicular direction on the stage with a slight scaling adjustment."),(0,s.kt)("h2",{id:"interpretation-of-the-values"},"Interpretation of the Values"),(0,s.kt)("p",null,"The following simulation of the ",(0,s.kt)("inlineCode",{parentName:"p"},"VirtualMicroscope")," inside ImSwitch (Config: ",(0,s.kt)("a",{parentName:"p",href:"https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json"},"https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json"),"):"),(0,s.kt)("p",null,(0,s.kt)("img",{src:a(15346).Z,width:"480",height:"311"})),(0,s.kt)("p",null,"The result of the stage mapping is a json file containing (under ",(0,s.kt)("inlineCode",{parentName:"p"},"/ImSwitch/calibFile.json"),") the following important element:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-json"},' "camera_stage_mapping_calibration": {\n "backlash": 0.0,\n "backlash_vector": [\n 0.0,\n 0.0,\n 0.0\n ],\n "image_to_stage_displacement": [\n [\n 0.0,\n -1.0\n ],\n [\n -1.0,\n 0.0\n ]\n ]\n }\n')),(0,s.kt)("p",null,"The provided matrices explains transforming image coordinates to stage coordinates and estimating backlash. Let's break down the interpretation of the entries:"),(0,s.kt)("h3",{id:"1-image_to_stage_displacement-matrix"},"1. ",(0,s.kt)("inlineCode",{parentName:"h3"},"image_to_stage_displacement")," Matrix"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'mData["camera_stage_mapping_calibration"]["image_to_stage_displacement"] =\narray([[ 0. , -1.00135997],\n [-1.00135997, 0. ]])\n')),(0,s.kt)("p",null,"This matrix is a 2x2 transformation matrix used to map image coordinates to stage coordinates. 
Each entry in this matrix has a specific meaning:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"[","0,0","]"," = 0"),": There is no direct transformation of the x-coordinate in the image to the x-coordinate in the stage."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"[","0,1","]"," = -1.00135997"),": The y-coordinate in the image inversely affects the x-coordinate in the stage."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"[","1,0","]"," = -1.00135997"),": The x-coordinate in the image inversely affects the y-coordinate in the stage."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"[","1,1","]"," = 0"),": There is no direct transformation of the y-coordinate in the image to the y-coordinate in the stage.")),(0,s.kt)("p",null,"The presence of -1.00135997 off-diagonal elements indicates that the transformation involves a negative and approximately unit scaling between the coordinates, implying a possible 90-degree rotation combined with a scaling factor close to -1."),(0,s.kt)("h3",{id:"2-backlash_vector-matrix"},"2. ",(0,s.kt)("inlineCode",{parentName:"h3"},"backlash_vector")," Matrix"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'mData["camera_stage_mapping_calibration"]["backlash_vector"] =\narray([ 0., 0., 0.])\n')),(0,s.kt)("p",null,"This vector represents the estimated backlash in each direction (x, y, and possibly z, though z is not utilized in a 2D context). 
Here, all elements are zero, indicating no measurable backlash in the x and y directions."))}c.isMDXComponent=!0},15346:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/StageMapping1-c4576ec64061a4416996e558150edf24.gif"},95223:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/StageMapping2-d11998287b8c6da911a5e7300276200f.png"},6161:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/StageMappingSampleView-0688332923e7a42a80b02895e1013c90.png"},14006:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/StageOverview-4d690420657841eae78311f94bd0ece2.png"},55828:(e,t,a)=>{a.d(t,{Z:()=>n});const n="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABPgAAAKMCAAAAABTT1mrAAAmRUlEQVR42u3deXhM9+LH8c9kEXspilqrSJAiCImlQmir2qo92lJSqq3b1dLl9keri1C6aqu0llJrW62lFVtL1b5EEUuCIPYlSpCQ5Pz+mMlksiGWOzM579fz3Oeec2bmOP2e8fY9M5OJxRAAmIsHQwCA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgeA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgeA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AAgfABA+5FOp+xkDgPCZy8yq1e5bxzAAksVgDEziStUj0oOLruOeO/fYF+vew8AhH/JiCMzi+BFJO67nntM+tC9++QIDBy514b4q3i/pScYBYMZnJlO/jA7pdz13rMJYIZ/jNT5klyJJWvYQl7pgxgeTPSt4biDf4jU+AIQPALjUhevat3LNkdPehatXr9XG/i/Ym3HSf/0z7mMMPiy9HCRp9RdShP2Ni8SfYw8cPH13tWr3BFVgJEH44CY2DVluX27wWfP0OfxMacuGYvZbRoyR6teTpLUzpddt4Uv44vPTkrRNkleHF1symuBSF24g7snAjO5pc4v+tqXhodLuvvYblg+VSvxYKOtU8b5hp+0rKT+FPMB4ghkfXN+u+smSVNbf/464XesljW8VJknynNEgXrObvWS925EeqbJMuTfLg8+GHpZUo+3dReP37f3HkJoxoCB8cH01626Qz8BXykiSVry6RXqpbSlJUpnZLa9oUOMgSUrpfkJ6/bGsD54YJ5Wa+pBFkrR95MxSAxlQcKkLNzhxX3l03vmBtXtqubqmdHKs7abgMdKVbqck6Y1VUqv3sz14smT5rZ21e/KfumdBUQYUhA9uoNGRHzO+OaXgdxZpe/raiz2kQ0+mSXPHSBVmemZ77D6pTuOM1XsaMZwgfHALZR1XmleWdtvXJtSWFr+n2D6S9+y7cjzr3gwgCB/cXS0pJi19pchPxaThv3b5V/qoaQ53vlfa8hNjBsIHN1ddSkqwr/l9J6U9vlXq9nJOd24uqUv3WEYNZsW7um4sLfrQoUNnrd+vs0qSwzftdH3l0/QA5mDo1H+l2XPqNGvQ6D6ueUH44D5SZ3ywK/
dbP1q6XdLPOb9dW2bjs39IxvbtUsEGrR5twmCCS124hYT6Pa/SPZ0+JeX+TfPVl39r+wHdpNUfBD1xguEEMz64gZRu2yU1bXd3+QKSpK8zv1mRGnZMksL9/XJ5/DPP7Phz/fo9aZI0Y9HkxxhRED64vMVLpVK/ZXwYb2Hmm//7p2QxdL7T+lw/m1ynzgBd2Lpq2jYpoU/MnQwpuNSFq/tV0uzGud46Siq5t4e0s+9V91Kk6ZCo6WWlM7MYURA+uLwNUolWDuv/Ot64t7chy5R7xvtJsz691jOgx0RJexhRED64g0IWh9T94HBDUpez0uBHVXROYWnwqmvtp4GkkwwnCB9cXj3paJx97cqAZIfbBkRJ938oyf8rKaXbsWwPnvbNFYe19ZL4EmYQPri+QEk90muX+Eikw00TJ0plrV9N8HS4dLR7SpbHHn3+uZoT7Rv3PS8piBEF4YPL61BaWhs444qkkxG1FjvcEjVA8pxR3roytq608o0sj307UXHP+PYe+eueEzt+71P3iFSrAyMKM+HjLG6qwqwHUrXtiZ53p106I8mn6zTbDWe7JEnvpr/vUWhOo/MaE9Ql02PLe6RJ+/ZlbCgzg38AwYwPbqD19EqSUg8dPiOp9qr0Hzszeu+V2r1lv1vNbyWFZ/4Zj/f/esDhfRFZ2q2sx3iC8MEddIv5vMEdklSg3eTNjUrati74Vao8zaFr3QZI51+VpEKSpYQkqWnk3hHtrY8o6td/x285/3hHSdv/gHzHYjAG7uzMAc9ypW/sXy/j39OpVyqUYAxB+ACAS10AIHwAQPgAgPABAOEDAMIH4LodZAgIH2A24SMOMQi3Gp/jA1xbqCytn6rMOBA+wFThE+kjfID5wkf6CB9gwvCRPsIHmDB8pI/wASYMH+kjfIAJw0f6CB9gwvCRPsIHmDB8kqVVT9JH+ABzhY/0ET7AhOEjfYQPMGH4SB/hA0wYPtJH+AATho/0ET7+KsCcf4NJH+EjfCB9IHyED2b4WxzSi/QRPsIH0gfCR/hA+uCI37kB5AsGc5g88GIIAPef77XsVYVRIHwA2QPhA8geCB9A9kw8bLwiCri0ULLHjA8A2SN8gFmzF9KT7BE+gOyB8AFkD4QPIHsgfADZI3wAyB7hA0D2CB8Askf4AJA9wgeA7BE+AGSP8AEge4QPwFWzx2/TuPWDytdSAS7tINkjfABw0/gtawAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHgPABAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfAAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA95tmIFB8FQuOT5IHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AED4ABA+hgAA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AED4ABA+ACB8AED4AIDwAQDhAwDCBwCEDwCcyYshMJlL65Ik6R7f67p36s+rNxQPfKCZUw9CSlpy9FlnD8XuqVuPVq7/YBOnHsTeGXtiC9YK6FWAJ/JNMuB2/vzzJh483Hre+1zXnRMetN77tcvOPIhpnYuogbOH4l3rnS0vnHfeQRzpZZuo+P5xaw/CfJjxmc3jcSlS3Mrrm2gF77qzT9tzkVM+PjLDWQcxefqfV5w/FKnPT/B4smvlg0u/+mrtmgJOOoj9bfZ5P9u4rrF1xO4H1wbwVGbGx4wvjyIVfj13+0SF1hqGYSzw1lInHcRxyaP5mL9vy4wvD0PxvEpvNAzDMDZX0svOOoheqrXJMAzDuNhZNZOZ8d0M3txAri6M0PdNJKn9kxrupGO4a9yko3+9dq+TR2LfeI/IhpKkgOmen8U76Sie/e+GBpKkQpOq7Ini+XkzCB9y9fUJ3y7WpcGWlXuddBD9e9/l/JFYkdqpgW2x+aNa66SjaPZ+EdtSsf
bawPOT8OG2WK9w21LtB/SzmUci+KmMGW+A/nH+ARXQRZ6fN4M3N/IJY9bqrdu9atdq9+iByKd90reuWhgVdaVe/bYP3dgFnrqmLzaJjHPSQbjEUPhNzVg+qbJOH4qk3+XHc/7mniXID29u7A1JP6FtH9Ji28bE5y22jWGnbuTF9JKeKemLP+gxJx2EYRiGcSwPb27cvqMwDMMwjpbQ384+iMvhqsibG7y5gaV1/7xn/LrExB0fll2ySBesGw/V/drjlSUnz/zxts/MOtvzvtOzCeU905d9ddg5B+EaQ+EwUXj+bJumzjyIhL0bJjWYWGYGH2Fmxmf6Gd/Ziup21rp4vqU01zAMw0hrI9+11o07Gqpecp5nGHEKtC+fUR3nHEReZ3y38ygMwzBeVZHdzjyIU96SVDmODzDzAWYMim85y7ZYdGHlM9alr5aWWFrR9t7EUv+t73yY++M3L8+06v2yJMkib4cX06845yBcYyjs3vrEa05NZx5E8Q6rj0inl4XzrGfGZ/YZ378Wj6iMtQGy3lxRUx2mFBafy7nPMD7I8UlxQE3t90hUTeccRB5nfLf1KAzjDXnOcvpBxE1vLL3DjI/X+Mxuu9GmXsbae4tbSNLJ+ApPZWx8oH5ydO478Mhx1VCSfdNlZx2EawyFbUD+E+E5tZucPRRVeqx8Wu/8yvP+ZnCpmw/8o7oOayXbWi+XlOmnOQO2bKmX6w4GZv7qE9tbGnco1r7piEo65yBcYygkSSnhU72nd3HyQUiSfCaXjxjagSc+4TO3faqcfeOhzBtL6VTuO/C+M6etJYqeO1bOthyvas45CNcYCklKDvvFZ86jTj6IdEO/++dQJZ75N45L3XygqvZn3+ir3Y6r8bo7z/utrD3pi7HXDN/tOgjXGArpwiO/FJr3qJMPwq5QI8XwxCd85lZXW7NvrG/ZkuKwulGN8rzf6lqVvjjzmuG7XQfhGkOhhDZLi/72gJMPwsFxFeKJT/jM7T7LCsefm089LknF/M44fGBifEyJGnneb1d9kWxd2rPKo6mTDsI1huJ4yNo7l4Q4+SAcHN5apBFPfMJnbnf0T+2T8Qbs6oDyhyRpjN7blL5t70CNsuR5v52KHZtiXRqmHn5OOgiXGIqDLf6ptCrIupy25pJzDmLzZPsfnNI9tY03T/ybwSd68sFPbpyrosBtts+QPW9RtYuGYRjGsyr+nXXjT3ep/Y38bOj7KvuPYRjG2/KKcdpB5O0nN27LUSRUUu1D6Su/6QPnDEVHVZhg/eHp+EdV6iCf47sZFoP2u50Vaplly6pOJ737BNa+sG3r4mNq+ksZSVJit9/V+uH6nv8sna/gudavFDFe2ytJhzeXbSxJRd656u+4SW4QXbxzm3MLFuqNEU46iMsvHZF0YXmhNpK873/ZKUext7qa2d9o3bdj4GinDMXGsL3ye6hOtcNRE84X/vnBa44EmPHl8xmfYZzskX5Cg+am2bd+W9y6zWdUqm3LpYKZT//Uq/9JZ60f1/X6wmkHcSLTizGBzjmKLG+gDnTS+Uh8u7Ttbg/u42d1mfEx45OkjRu3HUyrXr1RcKbX5FdGbbPUDGie8RGyI6cd31sscs2fO12xeqdH49Z+zjuI+NNpGSs1izjlKNK2ZvprUvEuZ52PlL+XHLxUyj+g2XWeD+SG8OWX8JnyIBgKwndjeFcXAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+AAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAOEDAMIHAIQPAAgfALghL4Ygb1a4wkGEcB6QxZ8MAeEDzCZEBoNw/SwuN1qbJhUNr3nb/5RQ+9IyNzxpinb+QdR2hYNwjaNwkYMgfO4841seKo3ZV+kGHmlYOJ0ArofLvbkxXlLKxLw+asWL7WoUKFq/8//FcUoBuN2Mr7gklcnbYxZ8uEaSUrZu/T
niiTdqcVYBuNeM7wUfqdRTeXlEap9H19hXUr6v+wNnFYB7zfjqxy/07FQ4Dw9IDvtFkor6VkuKiU2RUnqeeZHzCsCdwqfST+ft/l0WSLpryAuFJB38YNIVGS95P8eJBeBGl7p5NWeBpPtjBxaSpMrfbCgtachRTiyA/Bu+CwMlNf+tWPp6vaWlpPMDObEA8m/4Rh6Sis8tkrGh3nhJM1ZxZgHk6ra/xnd0xcrdp0573Vu9eqNW9o1f/io1ec/hXqn9D0ph4ZKMsAT5fmG/4fz0XQcPHClerVq16q2L5rD7qZJeL+24pVPwGun75pxaAE4K35Ghk1MlSQeWS20/rW3b7PdSmpbU7Jlxv6HfSeWmSJIxWzqbvvnM558nSNLR3ZIKPvx646z73xYnlX0l87YRIdJ8fowDgHMudc+9XeO71IzVJfXety2FDpf03Hb7LQtHSJ4zy2d9fEyddxMy1pJ+bvJF1nvMk9Q+y4df7i8rHVvHqQXgjBnfvzVOSpLu9q95IXZ7gpQyNMR2CfrW2gW62GWD7T2JuF6GNKJl1scfb3NMUpG2Fcuc3rcv9rJkCc56l4WSHsiyzdLmB2lBEOcWgBPCd0eDSOm+D5uVlKTEiDFJMvpF+Vjj9H3D/drdd5YkKbnrGanj4GyP//Kg5BnR7w5J0qnPxyZ0b5T1LnslS2jWjQ/8IMVyagE45VL3C5/SX295pKQkqej7yz2kXeNtN5X8qaA0+3NJ0qsbpRqTsz98gqRvB1m7p9LDD3wZkfUeaaekcqWzbvWXdIJTC8Ap4auxLOY5T/ta8CuSNqevBYyVNGitpB++lgr/VDzboxOOSWV7Z6wXe6FK1rucSpNKZXtgKUnHObUAnBI+NSvhuDZY0m772jPh0pVupxTdX9K4+7I/+JKkclff/wlJ2SZ8Ks2MD4DzwpdZuZLSrozVLwOkQ0+e63xBeq5nTncvKEVvveoez0i6I9vWIp7WWwDA6eGTn5Rg/4yeCv5YQlp81y4p8NMcD81PuvLQt6lX2WFRSRezbU1Otd4CADn6X3w7y5kjl60LJyVdydhebepjhpKlUnN8cnzcm92lY/3GdGvRsGQue75L0ulsW0/bbgEAp4Tvty9j4y/mduMjb34oyWNalZxv7vbbFEm7hkv3NG7cLNAj5/CdyjF8ZTm1AJxzqbu8Ufvf9lzM/fZ+klQ2NLebJ39ie8t2/6yBQaVfz76jAiWkY0lZt+5nxgfAaeHb0H6TJBWrGRQcHBwcHFwiy+2Xe0jS0UG57uCVg1OeSH9jN2GUf2S2O1SQLv+VdeMySRU5tQCccal7qWOSVOHFPvbZV7tFme8wcK0k6fOgHrntonCvXjq4YVPUujOS9j80IyzL7W12SEvaZtm4WFJbTi0AZ8z4lh6Wyq95PderzpljJa++kvrtuNpuKnf+8LdTax6WpEGXs9z2mKw/r+to3y6pcBtOLQBnhG++pFG5/2rwnf0kRUwIly50Pn+NXVmCFi72lg7vybK9RQkpek7mbe9KeqAQpxaAM8J3QFJDh/XkXY63JnZOlB4fqLF1pd19rr23tk9Jismy0fsxSW9dcdy0dZqkLpxZAE4JX3FJ5xzWP41zvLXfTuneyVKhOcWkn0Zfe3cFJGX7etF3C0mxrztsONc3TWrYgzMLwCnhC5T1m0Jt1rzveOMXM6WCc+6QVPNbSW+syP74ruH7HdaSlkvK9i17Vd+U9ElG+c49uFGyfOXBmQXgtPBF2Mv3S2iiw21rB0r6PECS1G2AlNr9SNaHz/9xku9z8elr57vESPdm/9KCwfdKGvVYlCTJmNNkraR+jTmxAHJnMW7fvq+0XiVZOrxy351ph//+zPrRFZ0oI0mnGhySen5vu+PlZhulZn94S1KapxS4XpJ890gqULtWLb+K/x7/c84FSeP6Z/9T9rXdJ8nSNuBOn9g/t0tSxxk+1zy2jA9NL3PDk6Zo5x9EbVc4CNc4Chc5CEO4brfzc3zePzY8LOOXXy
SPNEmqETjddkvaE4ekOuPS71hgToME/T3os8wPfzg2TbocFZWxZWgO3VO1VQ9sl4zFi9M3hI/35LwCcNKlrspG2iZWaZL05Dr7dw28u0Qq+mPGLwmqOtkifT4z86M/Wf1YpoJV/OTdHP+U8itfdvh1Q5W/+o7uAXBe+FRn6R/tKnlIUrkXVk2zd+/Ie5Im+Dnc8bFBkv6TKslS0L6xya8HPulk/bYBr0qtp+9/JZc/peSnB/7rV0CSigdNjn0+v5yamDFdm/Qd/y/PUeCWs9z+FwYux50pVf7Gvx/v3OkzaXeXv2ag0w4lXL6nzHXv1fVf4/ux9wVJqjarUfaTxgtbLnUUvMbnfv4H38dXoOZNPbx48Xuua+papUq+OjGTw43QLrXWTN/WbON9PE0BtwsfbsClt4wuszzU8qUOSwesZDiAW4tP+rqmsUcbTvWQVHic519/MBwA4TOB5JEaZH2X597OmsR4AITPBPaeLtnRtviq5vKqNXBr8RrfzUmbflLSw77Sgd8XHQ8YUjX9hsRJm6NiqtQP6G37vb/rVkuydKpsndDNSJB059O57nafHk3/6ZOGXomnyjDQAOFzHTt6SlIx3+Rho1OltZN/amfd/kd4nKTo6OkfjbNO3cKjJSne+i00q6xfw9X83tzDVy190fuemHjCB3Cp60LqjIuIuF+KDxqZ2nbCXyGX+lo3vx8aV3vKtqSdMwJPdBogSRr7liwRH79ovbn52Iha6jil2lVmfBm/NMRXhxlngBmfK/3D0V+6tHLj0KNlJrWXlkRYL2xXDtOQ4T6Sn1/XUcO+atlNUquQbxJeKGZ7lM8ALd75WvPcd3tO5e3L1ZTEOAOEz+V8o8B55SR5vS1JSuydNnCk9RbPN72GPN+ivCRL0ML1oVLyq2l9G0lpG7ysX049NnPVWgamX+HaNxXQFUYYIHyup9M0x1/yMWt/nQ/sKwPn/zVhqCQFL1wdKi3+WpcnSjvPN7Q+4rXMVRsWyGACt/9SjSG4Bfr8mOmXG23WyxnfCOgxSFskScFaI2mWNDdZWpf+ZdIeuZyPjIngZcYXYMbngppm/l0gW9TAYS3AFr5Az7WGJWmepUL8og4Z4TuRkumhti9zKK5Y+6YjKskIA4TP1R1UVYe1CjotSSrmv3W336LzTcNemuEQvuI57qGSdtuX41WNIQW41HV1vtrpsBab/tGUYK3WbHXv6jH/woXtpatfbQ+VZf8NwsY+j6oMKUD4XF2A1jusbVBAevjWXJrv0bVcy4vzNqU2ueoeqmvLRdvi8hMVCjCkAOFzdY018qR95cI7CrSH7/fEFuUVppnrsv+izMzprH72W9vid2rOiAKEz+V1Dj7xnH1lcGwN2+9IqlEqerzCpM5eiyKvET710hjrux5Rcz2HMaIA4XN5nt8X/vkh628EPh32tefU9N+FFGxEenaWSrW5vMzjGr/698UyB18xJB1qn9TTlxEFbi3e1b05w2KlTfp4niR51xtq21p9anikf9+AGnFRE48X/Mz+cl7wArUuIylskWoVv/p+S4zp9eWGDrVWzT5S9h1GGSB8riT5o0uStNP6Lu7m9PCpU+NnFo+RJDWZnPHb5IKl7pL0uE9y0LX23LNk7/XrJTWaW5FhBgifK/GJOePwAWSHb5mqGPn35qgjpQPqt3T4Lb/NNqb5S9IdG5OuXbNHdkSuP1UtuG1BRhm41Swm/XZf1//1klc7afxORZc6Cn69pPvhzQ0AhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhA0D4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AhA8ACB8AED4AIHwA4IYshjn/u0PtS8vy9sAVrnD0ITxxkY3BEDDjA4BceTEEedPSFabpinb+QdR2hYNwjaNwkYMAMz4AIHwAQPgAED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfAB
A+ACB8AED4AIDwAQDhAwDCBwCEDwDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAED64rJjR+xgE4DbwYghc1KZf5u6QbzUGAiB8JhH3yS8HGQWAS11TGfX5wbJ9F/ZhIABmfObxYpXmwR6az0AAhM88atViDAAudQGAGZ+LSJt+UtLDvtKB3xcdDxhSNf2GxEmbo2Kq1A/oXdq6vm61JEunypKk5BkJku58mvEDCJ8b2tFTkor5Jg8bnSqtnfxTO+v2P8LjJEVHT/9oXEdJUni0JMWPliStsr5t0fxeBhDgUtf91BkXEXG/FB80MrXthL9CLvW1bn4/NK72lG1JO2cEnug0QJI09i1ZIj5+0Xpz87ERtdRxCh/SA5jxueU/HP2lSys3Dj1aZlJ7aUmE9cJ25TANGe4j+fl1HTXsq5bdJLUK+SbhhWK2R/kM0OKdrzVn+ADC576+UeC8cpK83pYkJfZOGzjSeovnm15Dnm9RXpIlaOH6UCn51bS+jaS0DV4NrTPBpEx7ahnIaAKEzz10mlbIYW3W/jof2FcGzv9rwlBJCl64OlRa/LUuT5R2nm9ofcRrVzLtaBjhA/4Hl2oMwS3Q50fH7mmzXvbJGOFB2iJJCtYaSbOkucnSOgXleAI4HwAzPjfR1JJpdYsaOKwF2MIX6LnWsCTNs1SIX9QhI3wnUjI9tCiDCRA+t3RQVR3WKui0JKmY/9bdfovONw17aYZD+IozXACXuvmBr3Y6rMWqomzXuqs1W927esy/cGF76eqME0D48pEArXdY26CA9PCtuTTfo2u5lhfnbUptwjABhC8/aayRJ+0rF95RoD18vye2KK8wzbRf6QIgfPlD5+ATz9lXBsfW6G9dqlEqerzCpM5eiyIJH0D48hfP7wv//FC8JOl02NeeUwvbtgcbkZ6dpVJtLi/zaMwwAU7Du7o3Z1istEkfz5Mk73pDbVurTw2P9O8bUCMuauLxgp/ZX84LXqDWZSSFLVKtq7+bG/lNiqSNGjpBUqnhlRhogPC5jOSPLknSTuu7uJvTw6dOjZ9ZPEaS1GSyn/3ewVJ3SXrcJ/kaV7oL5lr/PypKkroRPoDwuQ6fmDMOH0B2+JapipF/b446UjqgfkvPjI3NNqb5S9IdG5MqXn2/nwy4lLFSnG+vAgifK6lQIbdbmjXLtqlAQ9uC/zVPix9DC9w+vLkBgPABAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfAAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHgPABAOEDAMIHAPmBxTDnf3eofWlZ3h64whWOPoQnLrIxGAJmfACQKy+GIG9ausI0XdHOP4jarnAQrnEULnIQYMYHAIQPAAgfAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfALPzYghcVPIvC/frvoAnijAUAOEzib3tYiSt0ohvWzMYAJe6prCxaUzVL/5eM77O/jZzGA2A8JnBvpATz+75T9OgfptfNfrFMR4A4TOB9y50HOctSQXGPPTvB4wHQPhMYLlluMW6ZInQGsYDuLV4c8Ml9Tf80xf9vXemcJYAwudC0qaflPSwr3Tg90XHA4ZUTb8hcdLmqJgq9QN6l7aur1stydKpsiQpeUaCpDufznW3b2Usnr1SmpMEED5XsqOnJBXzTR42OlVaO/mndtbtf4THSYqOnv7RuI6SpPBoSYofLUla1UeS1Pze6/gTRiuAYQZuLV7juzl1xkVE3C/FB41MbTvhr5BLfa2b3w+Nqz1lW9LOGYEnOg2QJI19S5aIj1+03tx8bEQtdZxS7Tr+gA2j9X8MM3BrWQxz/neH2peW3f
S+3nm3/7yjZSa1l1IiSj8nSStbGYOH+0hS6qhhV2Z1kySjTMLZYg4HsPyv5tex79hmJ/qNz3bSFO38EaztCgfhGkfhIgdh0r/KXOo6zzcKnFdOktfbkqTE3mkDR1pv8XzTa8jzLcpLsgQtXB8qJb+a1reRlLbBq6F1JpiUaU8tAzOt7mt1otVYxhcgfK6o07RCDmuz9tfJ+OzdwPl/TRgqScELV4dKi7/W5YnSzvMNrY947UqmHQ3LFL6Y0PjgXwswvMAtxmt8t0KfHx27p8162SdjhAdpiyQpWGskzZLmJkvrFJTjCci0+k+LQ0GLijG8ADM+V9TUkml1ixo4rAXYwhfoudawJM2zVIhf1CEjfCdSMj20qMPy2ocTWiykewDhcwsHVdVhrYJOS5KK+W/d7bfofNOwl2Y4hK94rntZ9nhim18LM5oAl7puwVc7HdZiVVG2a93Vmq3uXT3mX7iwvXT1a+zk1/aJD8+newDhcxMBWu+wtiH9E8jBWnNpvkfXci0vztuU2uQa+5jWJbnj3IKMJUD43ERjjTxpX7nwjgLt4fs9sUV5hWmm/Uo3N1/1Suk1m/dzAcLnNjoHn3jOvjI4tkZ/61KNUtHjFSZ19loUeY3wjRhgDJ5se/01ficjChA+l+f5feGfH4qXJJ0O+9pzavordcFGpGdnqVSby8s8Gl9tB2Pesowelf5OcadGKQwpcEuZ5V3dQ31y+4Ge0ExrlkmV8rTfYbHSJn08T5K86w21ba0+NTzSv29AjbioiccLfmZ/OS94gVqXkRS2SLWKX22vUdKKFfaVK0lFeaIChC/vKoUuva77heate8kfXZKkndar0c3p4VOnxs8sHiNJajLZz37vYKm7JD3uk3yNl/hkzHdc4XkKEL4b8eSy68mH5am87dUn5ozDdajDt0xVjPx7c9SR0gH1W3pmbGy2Mc1fku7YmFTxqrsd+Wqmc8SHmAHCd0Mqt76er2FpXSmPu61QIbdbmjXLtqlAQ9uC/zX2evfdPDOB28g8b248Zbn1Ez4AhM/Fp3zXMeGrzDMCIHzmmvIx4QMIn+mmfEz4AMJntikfEz6A8JluyseEDyB8ZpvyMeEDCJ/ppnxM+ADCZ7YpHxM+gPCZbsrHhA8gfGab8jHhAwif6aZ8TPgAwme2KR8TPoDwmW7Kx4QPIHxmm/Ix4QMIX36e8rXKaWsrJnwA4cvHeuYw5bP05JkAED6TTfmY8AGEz2xTPiZ8AOEz3ZSPCR9A+Mw25WPCBxA+0035mPABhM9sUz4mfADhM92UjwkfQPjMNuVjwgcQPtNN+ZjwAYTPbFM+JnwA4TPdlI8JH0D4zDblY8IHED7TTfmY8AGEz2xTPiZ8AOEz05QvRJJCmPABhM9EelkkSy+eAQDhM9mUjwkfQPjMNuVjwgeYk8Uw73/7+3o77w9a4QpHHsITF9kYDMH18zLxfzvzPYAZHwCYgwdDAIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCB4DwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCB4DwAQDhAwDCBwCEDwAIHwAQPgAgfADgEv4fNS7SXkUzLQcAAAAASUVORK5CYII="}}]); \ No newline at end of file diff --git a/assets/js/7f6a1755.f9c91d8f.js b/assets/js/7f6a1755.f9c91d8f.js deleted file mode 100644 index b47ff1f6c..000000000 --- a/assets/js/7f6a1755.f9c91d8f.js +++ /dev/null @@ -1 
+0,0 @@ -"use strict";(self.webpackChunkuc_2_docs=self.webpackChunkuc_2_docs||[]).push([[822],{3905:(e,t,a)=>{a.d(t,{Zo:()=>o,kt:()=>g});var n=a(67294);function s(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function i(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function r(e){for(var t=1;t=0||(s[a]=e[a]);return s}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(s[a]=e[a])}return s}var p=n.createContext({}),l=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):r(r({},t),e)),a},o=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},A=n.forwardRef((function(e,t){var a=e.components,s=e.mdxType,i=e.originalType,p=e.parentName,o=m(e,["components","mdxType","originalType","parentName"]),A=l(a),g=s,h=A["".concat(p,".").concat(g)]||A[g]||c[g]||i;return a?n.createElement(h,r(r({ref:t},o),{},{components:a})):n.createElement(h,r({ref:t},o))}));function g(e,t){var a=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var i=a.length,r=new Array(i);r[0]=A;var m={};for(var p in t)hasOwnProperty.call(t,p)&&(m[p]=t[p]);m.originalType=e,m.mdxType="string"==typeof e?e:s,r[1]=m;for(var l=2;l{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>r,default:()=>c,frontMatter:()=>i,metadata:()=>m,toc:()=>l});var n=a(87462),s=(a(67294),a(3905));const i={},r="Stage Mapping and Stage Calibration",m={unversionedId:"Investigator/XYZMicroscope/StageCalibration",id:"Investigator/XYZMicroscope/StageCalibration",title:"Stage Mapping and Stage Calibration",description:"Stage 
Coordinates",source:"@site/docs/02_Investigator/02_XYZMicroscope/StageCalibration.md",sourceDirName:"02_Investigator/02_XYZMicroscope",slug:"/Investigator/XYZMicroscope/StageCalibration",permalink:"/docs/Investigator/XYZMicroscope/StageCalibration",draft:!1,tags:[],version:"current",frontMatter:{},sidebar:"tutorialSidebar",previous:{title:"openUC2 Setting up the tube lens",permalink:"/docs/Investigator/XYZMicroscope/SetupTubelens"},next:{title:"Stage Scanning and Image Stitching (ASHLAR)",permalink:"/docs/Investigator/XYZMicroscope/StageScanning"}},p={},l=[{value:"Stage Coordinates",id:"stage-coordinates",level:2},{value:"Alignment of Axes",id:"alignment-of-axes",level:3},{value:"Understanding Axes in NumPy",id:"understanding-axes-in-numpy",level:3},{value:"Stage Coordinate System",id:"stage-coordinate-system",level:3},{value:"Aligning Axes with Imswitch",id:"aligning-axes-with-imswitch",level:3},{value:"Steps for Aligning Axes:",id:"steps-for-aligning-axes",level:4},{value:"Stage Calibration",id:"stage-calibration",level:2},{value:"Interpreation of the Matrix",id:"interpreation-of-the-matrix",level:2},{value:"Calibration Matrix image_to_stage_displacement",id:"calibration-matrix-image_to_stage_displacement",level:3},{value:"Entries and Their Names",id:"entries-and-their-names",level:4},{value:"Summary of the Matrix Entries",id:"summary-of-the-matrix-entries",level:3},{value:"Interpretation of the Values",id:"interpretation-of-the-values",level:2},{value:"1. image_to_stage_displacement Matrix",id:"1-image_to_stage_displacement-matrix",level:3},{value:"2. 
backlash_vector Matrix",id:"2-backlash_vector-matrix",level:3}],o={toc:l};function c(e){let{components:t,...i}=e;return(0,s.kt)("wrapper",(0,n.Z)({},o,i,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h1",{id:"stage-mapping-and-stage-calibration"},"Stage Mapping and Stage Calibration"),(0,s.kt)("h2",{id:"stage-coordinates"},"Stage Coordinates"),(0,s.kt)("p",null,"In this tutorial, we will guide you through the process of aligning the coordinate systems for the UC2 microscope stage. Proper alignment ensures that the movement of the stage corresponds accurately with the image displayed on the screen, facilitating an intuitive user experience. In principle all of this can be handled in software (e.g. flipping the camera image, changing stage axis), but it's always good to start with a common ground from the hardware side."),(0,s.kt)("p",null,(0,s.kt)("img",{src:a(14006).Z,width:"1792",height:"948"}),"\n",(0,s.kt)("em",{parentName:"p"},"This is the microscope (UC2 XYZ v3) with the ingredients controlled by ImSwitch")),(0,s.kt)("h3",{id:"alignment-of-axes"},"Alignment of Axes"),(0,s.kt)("p",null,"The goal of aligning the coordinate systems is to ensure they are correctly matched. The alignment of the stage is considered from the origin point (zero point). The desired behavior is as follows:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},"When the stage moves to the right (x+), the image on the screen should also move to the right."),(0,s.kt)("li",{parentName:"ul"},"When the stage moves upwards (y+), the image on the screen should move upwards as well.")),(0,s.kt)("p",null,(0,s.kt)("img",{src:a(95223).Z,width:"3217",height:"751"})),(0,s.kt)("p",null,"This is illustrated in the following Figure. 
When viewing the sample from above with the microscope positioned in front, the image should match what is shown in Imswitch.\nThis is also represented by the ",(0,s.kt)("inlineCode",{parentName:"p"},"VirtualMicroscope")," with the ",(0,s.kt)("inlineCode",{parentName:"p"},"VirtualStage")," and ",(0,s.kt)("inlineCode",{parentName:"p"},"VirtualCamera")," in this config (Config: ",(0,s.kt)("a",{parentName:"p",href:"https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json"},"https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json"),")."),(0,s.kt)("h3",{id:"understanding-axes-in-numpy"},"Understanding Axes in NumPy"),(0,s.kt)("p",null,"It's important to note the labeling of axes. In NumPy, x = 1 and y = 0. This means:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},"The x-axis is the second axis (index 1) of an array."),(0,s.kt)("li",{parentName:"ul"},"The y-axis is the first axis (index 0) of an array.")),(0,s.kt)("p",null,(0,s.kt)("img",{src:a(55828).Z,width:"1272",height:"652"})),(0,s.kt)("p",null,"NumPy arrays are multidimensional, with axes numbered as follows:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},"Axis 0 is the first axis (often the vertical direction)."),(0,s.kt)("li",{parentName:"ul"},"Axis 1 is the second axis (often the horizontal direction).")),(0,s.kt)("h3",{id:"stage-coordinate-system"},"Stage Coordinate System"),(0,s.kt)("p",null,"When viewing the stage from above, the coordinate system is arranged as follows:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"X-Axis (Horizontal)"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Positive direction: Right"),(0,s.kt)("li",{parentName:"ul"},"Negative direction: Left"))),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Y-Axis (Vertical)"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Positive direction: 
Up"),(0,s.kt)("li",{parentName:"ul"},"Negative direction: Down")))),(0,s.kt)("p",null,(0,s.kt)("img",{src:a(6161).Z,width:"857",height:"646"})),(0,s.kt)("h3",{id:"aligning-axes-with-imswitch"},"Aligning Axes with Imswitch"),(0,s.kt)("p",null,'To enable intuitive operation, the stage and camera axes must be correctly aligned with the coordinate system in Imswitch. To achieve this, the commands "flip x" and "flip y" are used. These commands invert the direction of the axes in the coordinate system, meaning that movement or position along the axes is reversed.'),(0,s.kt)("h4",{id:"steps-for-aligning-axes"},"Steps for Aligning Axes:"),(0,s.kt)("ol",null,(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Initial Setup:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Ensure the microscope and stage are properly connected to the control software (e.g., Imswitch)."))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Define Origin:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Identify the origin (zero point) of the stage coordinate system. 
(in Hardware this would be defined by the Endstops that are used for homing the axes; The motor will run - if the direction is set correctly - until it hits the switch)"))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Test Movement:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Move the stage to the right and observe the direction of the image on the screen.",(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},'If the image moves left, apply the "flip x" command.'))),(0,s.kt)("li",{parentName:"ul"},"Move the stage upwards and observe the direction of the image on the screen.",(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},'If the image moves down, apply the "flip y" command.'))))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Adjust Axes:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("p",{parentName:"li"},"Use the following commands as needed to align the axes:"),(0,s.kt)("pre",{parentName:"li"},(0,s.kt)("code",{parentName:"pre",className:"language-python"},"# Flip the x-axis if necessary\nif x_movement_incorrect:\n stage.flip_x()\n\n# Flip the y-axis if necessary\nif y_movement_incorrect:\n stage.flip_y()\n"))))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Verify Alignment:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"After applying the flips, verify that the stage movements correspond correctly with the image movements on the screen."))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Save Configuration:")),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},"Save the configuration settings to ensure the alignment persists across sessions.")))),(0,s.kt)("h2",{id:"stage-calibration"},"Stage Calibration"),(0,s.kt)("p",null,"Richard Bowman and 
his team provided a very nice way to calibrate stage coordinates to camera pixel coordinates. We burtally integrated the open-source software which you can find here: ",(0,s.kt)("a",{parentName:"p",href:"https://gitlab.com/openflexure/microscope-extensions/camera-stage-mapping"},"https://gitlab.com/openflexure/microscope-extensions/camera-stage-mapping")," into ImSwitch. If you activate the ",(0,s.kt)("inlineCode",{parentName:"p"},"HistoScan")," Controller and Widget you can start it either by the GUI or using the HTTP interface by calling http://localhost:8002/HistoScanController/startStageMapping (URL and PORT may differ). What the stage will do is moving a certain series of steps in XY, performs a cross-correlation of the images and computes the shift in XY of the mciroscope image on the camera, compares it to the expected shift on and returns the Image-To-Stage-Displacement Matrix as well as the Backlashvector. Both matrices/vectors are microscope specificand will help you matching e.g. stage coordinates for stitching software such as ASHLAR or OFM Stitching. This document should give you a rough idea of what's happening."),(0,s.kt)("p",null,"Some terminology:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"Combine X and Y calibrations"),": The calibration involves combining two separate measurements or calibration runs for the x and y directions, ensuring that the directions are orthogonal (at right angles to each other).")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"2x2 transformation matrix"),": The ",(0,s.kt)("inlineCode",{parentName:"p"},"image_to_stage_displacement")," matrix maps image displacements to stage displacements. 
This ensures that movements in the image coordinate system are accurately translated to movements in the stage coordinate system.")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"backlash_vector"),": This is a vector estimating the backlash (mechanical slack or play in the system) in each direction. In this case, the estimated backlash is zero, indicating a precise calibration with no noticeable mechanical play.")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"backlash"),": The function is expected to return the highest element of ",(0,s.kt)("inlineCode",{parentName:"p"},"backlash_vector")," as a scalar value, which would be zero in this case."))),(0,s.kt)("h2",{id:"interpreation-of-the-matrix"},"Interpreation of the Matrix"),(0,s.kt)("h3",{id:"calibration-matrix-image_to_stage_displacement"},"Calibration Matrix ",(0,s.kt)("inlineCode",{parentName:"h3"},"image_to_stage_displacement")),(0,s.kt)("p",null,"The entries of the calibration matrix ",(0,s.kt)("inlineCode",{parentName:"p"},"image_to_stage_displacement")," can be given specific names and meanings based on their positions within the matrix. 
Let's denote the matrix as follows:"),(0,s.kt)("div",{className:"math math-display"},(0,s.kt)("span",{parentName:"div",className:"katex-display"},(0,s.kt)("span",{parentName:"span",className:"katex"},(0,s.kt)("span",{parentName:"span",className:"katex-mathml"},(0,s.kt)("math",{parentName:"span",xmlns:"http://www.w3.org/1998/Math/MathML",display:"block"},(0,s.kt)("semantics",{parentName:"math"},(0,s.kt)("mrow",{parentName:"semantics"},(0,s.kt)("mtext",{parentName:"mrow"},"image_to_stage_displacement"),(0,s.kt)("mo",{parentName:"mrow"},"="),(0,s.kt)("mrow",{parentName:"mrow"},(0,s.kt)("mo",{parentName:"mrow",fence:"true"},"("),(0,s.kt)("mtable",{parentName:"mrow",rowspacing:"0.16em",columnalign:"center center",columnspacing:"1em"},(0,s.kt)("mtr",{parentName:"mtable"},(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mn",{parentName:"mstyle"},"0"))),(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mrow",{parentName:"mstyle"},(0,s.kt)("mo",{parentName:"mrow"},"\u2212"),(0,s.kt)("mn",{parentName:"mrow"},"1.0"))))),(0,s.kt)("mtr",{parentName:"mtable"},(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mrow",{parentName:"mstyle"},(0,s.kt)("mo",{parentName:"mrow"},"\u2212"),(0,s.kt)("mn",{parentName:"mrow"},"1.0")))),(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mn",{parentName:"mstyle"},"0"))))),(0,s.kt)("mo",{parentName:"mrow",fence:"true"},")"))),(0,s.kt)("annotation",{parentName:"semantics",encoding:"application/x-tex"},"\\text{image\\_to\\_stage\\_displacement} = \\begin{pmatrix} 0 & -1.0 \\\\ -1.0 & 0 
\\end{pmatrix}")))),(0,s.kt)("span",{parentName:"span",className:"katex-html","aria-hidden":"true"},(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"1.0044em",verticalAlign:"-0.31em"}}),(0,s.kt)("span",{parentName:"span",className:"mord text"},(0,s.kt)("span",{parentName:"span",className:"mord"},"image_to_stage_displacement")),(0,s.kt)("span",{parentName:"span",className:"mspace",style:{marginRight:"0.2778em"}}),(0,s.kt)("span",{parentName:"span",className:"mrel"},"="),(0,s.kt)("span",{parentName:"span",className:"mspace",style:{marginRight:"0.2778em"}})),(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"2.4em",verticalAlign:"-0.95em"}}),(0,s.kt)("span",{parentName:"span",className:"minner"},(0,s.kt)("span",{parentName:"span",className:"mopen delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size3"},"(")),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mtable"},(0,s.kt)("span",{parentName:"span",className:"col-align-c"},(0,s.kt)("span",{parentName:"span",className:"vlist-t 
vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"1.45em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-3.61em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord"},"0"))),(0,s.kt)("span",{parentName:"span",style:{top:"-2.41em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord"},"\u2212"),(0,s.kt)("span",{parentName:"span",className:"mord"},"1.0")))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.95em"}},(0,s.kt)("span",{parentName:"span"}))))),(0,s.kt)("span",{parentName:"span",className:"arraycolsep",style:{width:"0.5em"}}),(0,s.kt)("span",{parentName:"span",className:"arraycolsep",style:{width:"0.5em"}}),(0,s.kt)("span",{parentName:"span",className:"col-align-c"},(0,s.kt)("span",{parentName:"span",className:"vlist-t 
vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"1.45em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-3.61em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord"},"\u2212"),(0,s.kt)("span",{parentName:"span",className:"mord"},"1.0"))),(0,s.kt)("span",{parentName:"span",style:{top:"-2.41em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord"},"0")))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.95em"}},(0,s.kt)("span",{parentName:"span"}))))))),(0,s.kt)("span",{parentName:"span",className:"mclose delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size3"},")")))))))),(0,s.kt)("div",{className:"math math-display"},(0,s.kt)("span",{parentName:"div",className:"katex-display"},(0,s.kt)("span",{parentName:"span",className:"katex"},(0,s.kt)("span",{parentName:"span",className:"katex-mathml"},(0,s.kt)("math",{parentName:"span",xmlns:"http://www.w3.org/1998/Math/MathML",display:"block"},(0,s.kt)("semantics",{parentName:"math"},(0,s.kt)("mrow",{parentName:"semantics"},(0,s.kt)("mo",{parentName:"mrow",fence:"true"},"("),(0,s.kt)("mtable",{parentName:"mrow",rowspacing:"0.16em",columnalign:"center 
center",columnspacing:"1em"},(0,s.kt)("mtr",{parentName:"mtable"},(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mi",{parentName:"mstyle"},"a"))),(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mi",{parentName:"mstyle"},"b")))),(0,s.kt)("mtr",{parentName:"mtable"},(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mi",{parentName:"mstyle"},"c"))),(0,s.kt)("mtd",{parentName:"mtr"},(0,s.kt)("mstyle",{parentName:"mtd",scriptlevel:"0",displaystyle:"false"},(0,s.kt)("mi",{parentName:"mstyle"},"d"))))),(0,s.kt)("mo",{parentName:"mrow",fence:"true"},")")),(0,s.kt)("annotation",{parentName:"semantics",encoding:"application/x-tex"},"\\begin{pmatrix} a & b \\\\ c & d \\end{pmatrix}")))),(0,s.kt)("span",{parentName:"span",className:"katex-html","aria-hidden":"true"},(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"2.4em",verticalAlign:"-0.95em"}}),(0,s.kt)("span",{parentName:"span",className:"minner"},(0,s.kt)("span",{parentName:"span",className:"mopen delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size3"},"(")),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mtable"},(0,s.kt)("span",{parentName:"span",className:"col-align-c"},(0,s.kt)("span",{parentName:"span",className:"vlist-t vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"1.45em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-3.61em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord 
mathnormal"},"a"))),(0,s.kt)("span",{parentName:"span",style:{top:"-2.41em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal"},"c")))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.95em"}},(0,s.kt)("span",{parentName:"span"}))))),(0,s.kt)("span",{parentName:"span",className:"arraycolsep",style:{width:"0.5em"}}),(0,s.kt)("span",{parentName:"span",className:"arraycolsep",style:{width:"0.5em"}}),(0,s.kt)("span",{parentName:"span",className:"col-align-c"},(0,s.kt)("span",{parentName:"span",className:"vlist-t vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"1.45em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-3.61em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal"},"b"))),(0,s.kt)("span",{parentName:"span",style:{top:"-2.41em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"3em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal"},"d")))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.95em"}},(0,s.kt)("span",{parentName:"span"}))))))),(0,s.kt)("span",{parentName:"span",className:"mclose delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size3"},")")))))))),(0,s.kt)("h4",{id:"entries-and-their-names"},"Entries and Their 
Names"),(0,s.kt)("ol",null,(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"a (0)"),":"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Name:")," ",(0,s.kt)("inlineCode",{parentName:"li"},"a")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Meaning:")," Represents the scaling factor from the x-coordinate in the image to the x-coordinate in the stage. Here, it is 0, indicating no direct mapping from image x to stage x."))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"b (-1.0)"),":"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Name:")," ",(0,s.kt)("inlineCode",{parentName:"li"},"b")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Meaning:")," Represents the scaling factor from the y-coordinate in the image to the x-coordinate in the stage. The value -1.0 indicates an inverse and slightly scaled mapping from image y to stage x."))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"c (-1.0)"),":"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Name:")," ",(0,s.kt)("inlineCode",{parentName:"li"},"c")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Meaning:")," Represents the scaling factor from the x-coordinate in the image to the y-coordinate in the stage. 
The value -1.0 indicates an inverse and slightly scaled mapping from image x to stage y."))),(0,s.kt)("li",{parentName:"ol"},(0,s.kt)("p",{parentName:"li"},(0,s.kt)("strong",{parentName:"p"},"d (0)"),":"),(0,s.kt)("ul",{parentName:"li"},(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Name:")," ",(0,s.kt)("inlineCode",{parentName:"li"},"d")),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"Meaning:")," Represents the scaling factor from the y-coordinate in the image to the y-coordinate in the stage. Here, it is 0, indicating no direct mapping from image y to stage y.")))),(0,s.kt)("h3",{id:"summary-of-the-matrix-entries"},"Summary of the Matrix Entries"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"a (0)"),": No direct mapping from image x to stage x."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"b (-1.0)"),": Inverse mapping from image y to stage x."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"c (-1.0)"),": Inverse mapping from image x to stage y."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"d (0)"),": No direct mapping from image y to stage y.")),(0,s.kt)("p",null,"This calibration matrix indicates that there is a transformation involving a 90-degree rotation combined with an inverse scaling factor slightly above 1 between the image coordinates and the stage coordinates. 
The exact interpretation may depend on the specific application, but generally, it implies that movements in one direction in the image are mapped to movements in the perpendicular direction on the stage with a slight scaling adjustment."),(0,s.kt)("h2",{id:"interpretation-of-the-values"},"Interpretation of the Values"),(0,s.kt)("p",null,"The following simulation of the ",(0,s.kt)("inlineCode",{parentName:"p"},"VirtualMicroscope")," inside ImSwitch (Config: ",(0,s.kt)("a",{parentName:"p",href:"https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json"},"https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json"),"):"),(0,s.kt)("p",null,(0,s.kt)("img",{src:a(15346).Z,width:"480",height:"311"})),(0,s.kt)("p",null,"The result of the stage mapping is a json file containing (under ",(0,s.kt)("inlineCode",{parentName:"p"},"/ImSwitch/calibFile.json"),") the following important element:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-json"},' "camera_stage_mapping_calibration": {\n "backlash": 0.0,\n "backlash_vector": [\n 0.0,\n 0.0,\n 0.0\n ],\n "image_to_stage_displacement": [\n [\n 0.0,\n -1.0\n ],\n [\n -1.0,\n 0.0\n ]\n ]\n }\n')),(0,s.kt)("p",null,"The provided matrices explains transforming image coordinates to stage coordinates and estimating backlash. Let's break down the interpretation of the entries:"),(0,s.kt)("h3",{id:"1-image_to_stage_displacement-matrix"},"1. ",(0,s.kt)("inlineCode",{parentName:"h3"},"image_to_stage_displacement")," Matrix"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'mData["camera_stage_mapping_calibration"]["image_to_stage_displacement"] =\narray([[ 0. , -1.00135997],\n [-1.00135997, 0. ]])\n')),(0,s.kt)("p",null,"This matrix is a 2x2 transformation matrix used to map image coordinates to stage coordinates. 
Each entry in this matrix has a specific meaning:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"[","0,0","]"," = 0"),": There is no direct transformation of the x-coordinate in the image to the x-coordinate in the stage."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"[","0,1","]"," = -1.00135997"),": The y-coordinate in the image inversely affects the x-coordinate in the stage."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"[","1,0","]"," = -1.00135997"),": The x-coordinate in the image inversely affects the y-coordinate in the stage."),(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("strong",{parentName:"li"},"[","1,1","]"," = 0"),": There is no direct transformation of the y-coordinate in the image to the y-coordinate in the stage.")),(0,s.kt)("p",null,"The presence of -1.00135997 off-diagonal elements indicates that the transformation involves a negative and approximately unit scaling between the coordinates, implying a possible 90-degree rotation combined with a scaling factor close to -1."),(0,s.kt)("h3",{id:"2-backlash_vector-matrix"},"2. ",(0,s.kt)("inlineCode",{parentName:"h3"},"backlash_vector")," Matrix"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-python"},'mData["camera_stage_mapping_calibration"]["backlash_vector"] =\narray([ 0., 0., 0.])\n')),(0,s.kt)("p",null,"This vector represents the estimated backlash in each direction (x, y, and possibly z, though z is not utilized in a 2D context). 
Here, all elements are zero, indicating no measurable backlash in the x and y directions."))}c.isMDXComponent=!0},15346:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/StageMapping1-c4576ec64061a4416996e558150edf24.gif"},95223:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/StageMapping2-d11998287b8c6da911a5e7300276200f.png"},6161:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/StageMappingSampleView-0688332923e7a42a80b02895e1013c90.png"},14006:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/StageOverview-4d690420657841eae78311f94bd0ece2.png"},55828:(e,t,a)=>{a.d(t,{Z:()=>n});const n="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABPgAAAKMCAAAAABTT1mrAAAmRUlEQVR42u3deXhM9+LH8c9kEXspilqrSJAiCImlQmir2qo92lJSqq3b1dLl9keri1C6aqu0llJrW62lFVtL1b5EEUuCIPYlSpCQ5Pz+mMlksiGWOzM579fz3Oeec2bmOP2e8fY9M5OJxRAAmIsHQwCA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgeA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgeA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AAgfABA+5FOp+xkDgPCZy8yq1e5bxzAAksVgDEziStUj0oOLruOeO/fYF+vew8AhH/JiCMzi+BFJO67nntM+tC9++QIDBy514b4q3i/pScYBYMZnJlO/jA7pdz13rMJYIZ/jNT5klyJJWvYQl7pgxgeTPSt4biDf4jU+AIQPALjUhevat3LNkdPehatXr9XG/i/Ym3HSf/0z7mMMPiy9HCRp9RdShP2Ni8SfYw8cPH13tWr3BFVgJEH44CY2DVluX27wWfP0OfxMacuGYvZbRoyR6teTpLUzpddt4Uv44vPTkrRNkleHF1symuBSF24g7snAjO5pc4v+tqXhodLuvvYblg+VSvxYKOtU8b5hp+0rKT+FPMB4ghkfXN+u+smSVNbf/464XesljW8VJknynNEgXrObvWS925EeqbJMuTfLg8+GHpZUo+3dReP37f3HkJoxoCB8cH01626Qz8BXykiSVry6RXqpbSlJUpnZLa9oUOMgSUrpfkJ6/bGsD54YJ5Wa+pBFkrR95MxSAxlQcKkLNzhxX3l03vmBtXtqubqmdHKs7abgMdKVbqck6Y1VUqv3sz14smT5rZ21e/KfumdBUQYUhA9uoNGRHzO+OaXgdxZpe/raiz2kQ0+mSXPHSBVmemZ77D6pTuOM1XsaMZwgfHALZR1XmleWdtvXJtSWFr+n2D6S9+y7cjzr3gwgCB/cXS0pJi19pchPxaThv3b5V/qoaQ53vlfa8hNjBsIHN1ddSkqwr/l9J6U9vlXq9nJOd24uqUv3WEYNZsW7um4sLfrQoUNnrd+vs0qSwzftdH3l0/QA5mDo1H+l2XPqNGvQ6D6ueUH44D5SZ3ywK/
dbP1q6XdLPOb9dW2bjs39IxvbtUsEGrR5twmCCS124hYT6Pa/SPZ0+JeX+TfPVl39r+wHdpNUfBD1xguEEMz64gZRu2yU1bXd3+QKSpK8zv1mRGnZMksL9/XJ5/DPP7Phz/fo9aZI0Y9HkxxhRED64vMVLpVK/ZXwYb2Hmm//7p2QxdL7T+lw/m1ynzgBd2Lpq2jYpoU/MnQwpuNSFq/tV0uzGud46Siq5t4e0s+9V91Kk6ZCo6WWlM7MYURA+uLwNUolWDuv/Ot64t7chy5R7xvtJsz691jOgx0RJexhRED64g0IWh9T94HBDUpez0uBHVXROYWnwqmvtp4GkkwwnCB9cXj3paJx97cqAZIfbBkRJ938oyf8rKaXbsWwPnvbNFYe19ZL4EmYQPri+QEk90muX+Eikw00TJ0plrV9N8HS4dLR7SpbHHn3+uZoT7Rv3PS8piBEF4YPL61BaWhs444qkkxG1FjvcEjVA8pxR3roytq608o0sj307UXHP+PYe+eueEzt+71P3iFSrAyMKM+HjLG6qwqwHUrXtiZ53p106I8mn6zTbDWe7JEnvpr/vUWhOo/MaE9Ql02PLe6RJ+/ZlbCgzg38AwYwPbqD19EqSUg8dPiOp9qr0Hzszeu+V2r1lv1vNbyWFZ/4Zj/f/esDhfRFZ2q2sx3iC8MEddIv5vMEdklSg3eTNjUrati74Vao8zaFr3QZI51+VpEKSpYQkqWnk3hHtrY8o6td/x285/3hHSdv/gHzHYjAG7uzMAc9ypW/sXy/j39OpVyqUYAxB+ACAS10AIHwAQPgAgPABAOEDAMIH4LodZAgIH2A24SMOMQi3Gp/jA1xbqCytn6rMOBA+wFThE+kjfID5wkf6CB9gwvCRPsIHmDB8pI/wASYMH+kjfIAJw0f6CB9gwvCRPsIHmDB8kqVVT9JH+ABzhY/0ET7AhOEjfYQPMGH4SB/hA0wYPtJH+AATho/0ET7+KsCcf4NJH+EjfCB9IHyED2b4WxzSi/QRPsIH0gfCR/hA+uCI37kB5AsGc5g88GIIAPef77XsVYVRIHwA2QPhA8geCB9A9kw8bLwiCri0ULLHjA8A2SN8gFmzF9KT7BE+gOyB8AFkD4QPIHsgfADZI3wAyB7hA0D2CB8Askf4AJA9wgeA7BE+AGSP8AEge4QPwFWzx2/TuPWDytdSAS7tINkjfABw0/gtawAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHgPABAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfAAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA95tmIFB8FQuOT5IHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AED4ABA+hgAA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AED4ABA+ACB8AED4AIDwAQDhAwDCBwCEDwCcyYshMJlL65Ik6R7f67p36s+rNxQPfKCZUw9CSlpy9FlnD8XuqVuPVq7/YBOnHsTeGXtiC9YK6FWAJ/JNMuB2/vzzJh483Hre+1zXnRMetN77tcvOPIhpnYuogbOH4l3rnS0vnHfeQRzpZZuo+P5xaw/CfJjxmc3jcSlS3Mrrm2gF77qzT9tzkVM+PjLDWQcxefqfV5w/FKnPT/B4smvlg0u/+mrtmgJOOoj9bfZ5P9u4rrF1xO4H1wbwVGbGx4wvjyIVfj13+0SF1hqGYSzw1lInHcRxyaP5mL9vy4wvD0PxvEpvNAzDMDZX0svOOoheqrXJMAzDuNhZNZOZ8d0M3txAri6M0PdNJKn9kxrupGO4a9yko3+9dq+TR2LfeI/IhpKkgOmen8U76Sie/e+GBpKkQpOq7Ini+XkzCB9y9fUJ3y7WpcGWlXuddBD9e9/l/JFYkdqpgW2x+aNa66SjaPZ+EdtSsf
bawPOT8OG2WK9w21LtB/SzmUci+KmMGW+A/nH+ARXQRZ6fN4M3N/IJY9bqrdu9atdq9+iByKd90reuWhgVdaVe/bYP3dgFnrqmLzaJjHPSQbjEUPhNzVg+qbJOH4qk3+XHc/7mniXID29u7A1JP6FtH9Ji28bE5y22jWGnbuTF9JKeKemLP+gxJx2EYRiGcSwPb27cvqMwDMMwjpbQ384+iMvhqsibG7y5gaV1/7xn/LrExB0fll2ySBesGw/V/drjlSUnz/zxts/MOtvzvtOzCeU905d9ddg5B+EaQ+EwUXj+bJumzjyIhL0bJjWYWGYGH2Fmxmf6Gd/Ziup21rp4vqU01zAMw0hrI9+11o07Gqpecp5nGHEKtC+fUR3nHEReZ3y38ygMwzBeVZHdzjyIU96SVDmODzDzAWYMim85y7ZYdGHlM9alr5aWWFrR9t7EUv+t73yY++M3L8+06v2yJMkib4cX06845yBcYyjs3vrEa05NZx5E8Q6rj0inl4XzrGfGZ/YZ378Wj6iMtQGy3lxRUx2mFBafy7nPMD7I8UlxQE3t90hUTeccRB5nfLf1KAzjDXnOcvpBxE1vLL3DjI/X+Mxuu9GmXsbae4tbSNLJ+ApPZWx8oH5ydO478Mhx1VCSfdNlZx2EawyFbUD+E+E5tZucPRRVeqx8Wu/8yvP+ZnCpmw/8o7oOayXbWi+XlOmnOQO2bKmX6w4GZv7qE9tbGnco1r7piEo65yBcYygkSSnhU72nd3HyQUiSfCaXjxjagSc+4TO3faqcfeOhzBtL6VTuO/C+M6etJYqeO1bOthyvas45CNcYCklKDvvFZ86jTj6IdEO/++dQJZ75N45L3XygqvZn3+ir3Y6r8bo7z/utrD3pi7HXDN/tOgjXGArpwiO/FJr3qJMPwq5QI8XwxCd85lZXW7NvrG/ZkuKwulGN8rzf6lqVvjjzmuG7XQfhGkOhhDZLi/72gJMPwsFxFeKJT/jM7T7LCsefm089LknF/M44fGBifEyJGnneb1d9kWxd2rPKo6mTDsI1huJ4yNo7l4Q4+SAcHN5apBFPfMJnbnf0T+2T8Qbs6oDyhyRpjN7blL5t70CNsuR5v52KHZtiXRqmHn5OOgiXGIqDLf6ptCrIupy25pJzDmLzZPsfnNI9tY03T/ybwSd68sFPbpyrosBtts+QPW9RtYuGYRjGsyr+nXXjT3ep/Y38bOj7KvuPYRjG2/KKcdpB5O0nN27LUSRUUu1D6Su/6QPnDEVHVZhg/eHp+EdV6iCf47sZFoP2u50Vaplly6pOJ737BNa+sG3r4mNq+ksZSVJit9/V+uH6nv8sna/gudavFDFe2ytJhzeXbSxJRd656u+4SW4QXbxzm3MLFuqNEU46iMsvHZF0YXmhNpK873/ZKUext7qa2d9o3bdj4GinDMXGsL3ye6hOtcNRE84X/vnBa44EmPHl8xmfYZzskX5Cg+am2bd+W9y6zWdUqm3LpYKZT//Uq/9JZ60f1/X6wmkHcSLTizGBzjmKLG+gDnTS+Uh8u7Ttbg/u42d1mfEx45OkjRu3HUyrXr1RcKbX5FdGbbPUDGie8RGyI6cd31sscs2fO12xeqdH49Z+zjuI+NNpGSs1izjlKNK2ZvprUvEuZ52PlL+XHLxUyj+g2XWeD+SG8OWX8JnyIBgKwndjeFcXAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+AAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAOEDAMIHAIQPAAgfALghL4Ygb1a4wkGEcB6QxZ8MAeEDzCZEBoNw/SwuN1qbJhUNr3nb/5RQ+9IyNzxpinb+QdR2hYNwjaNwkYMgfO4841seKo3ZV+kGHmlYOJ0ArofLvbkxXlLKxLw+asWL7WoUKFq/8//FcUoBuN2Mr7gklcnbYxZ8uEaSUrZu/T
niiTdqcVYBuNeM7wUfqdRTeXlEap9H19hXUr6v+wNnFYB7zfjqxy/07FQ4Dw9IDvtFkor6VkuKiU2RUnqeeZHzCsCdwqfST+ft/l0WSLpryAuFJB38YNIVGS95P8eJBeBGl7p5NWeBpPtjBxaSpMrfbCgtachRTiyA/Bu+CwMlNf+tWPp6vaWlpPMDObEA8m/4Rh6Sis8tkrGh3nhJM1ZxZgHk6ra/xnd0xcrdp0573Vu9eqNW9o1f/io1ec/hXqn9D0ph4ZKMsAT5fmG/4fz0XQcPHClerVq16q2L5rD7qZJeL+24pVPwGun75pxaAE4K35Ghk1MlSQeWS20/rW3b7PdSmpbU7Jlxv6HfSeWmSJIxWzqbvvnM558nSNLR3ZIKPvx646z73xYnlX0l87YRIdJ8fowDgHMudc+9XeO71IzVJfXety2FDpf03Hb7LQtHSJ4zy2d9fEyddxMy1pJ+bvJF1nvMk9Q+y4df7i8rHVvHqQXgjBnfvzVOSpLu9q95IXZ7gpQyNMR2CfrW2gW62GWD7T2JuF6GNKJl1scfb3NMUpG2Fcuc3rcv9rJkCc56l4WSHsiyzdLmB2lBEOcWgBPCd0eDSOm+D5uVlKTEiDFJMvpF+Vjj9H3D/drdd5YkKbnrGanj4GyP//Kg5BnR7w5J0qnPxyZ0b5T1LnslS2jWjQ/8IMVyagE45VL3C5/SX295pKQkqej7yz2kXeNtN5X8qaA0+3NJ0qsbpRqTsz98gqRvB1m7p9LDD3wZkfUeaaekcqWzbvWXdIJTC8Ap4auxLOY5T/ta8CuSNqevBYyVNGitpB++lgr/VDzboxOOSWV7Z6wXe6FK1rucSpNKZXtgKUnHObUAnBI+NSvhuDZY0m772jPh0pVupxTdX9K4+7I/+JKkclff/wlJ2SZ8Ks2MD4DzwpdZuZLSrozVLwOkQ0+e63xBeq5nTncvKEVvveoez0i6I9vWIp7WWwDA6eGTn5Rg/4yeCv5YQlp81y4p8NMcD81PuvLQt6lX2WFRSRezbU1Otd4CADn6X3w7y5kjl60LJyVdydhebepjhpKlUnN8cnzcm92lY/3GdGvRsGQue75L0ulsW0/bbgEAp4Tvty9j4y/mduMjb34oyWNalZxv7vbbFEm7hkv3NG7cLNAj5/CdyjF8ZTm1AJxzqbu8Ufvf9lzM/fZ+klQ2NLebJ39ie8t2/6yBQaVfz76jAiWkY0lZt+5nxgfAaeHb0H6TJBWrGRQcHBwcHFwiy+2Xe0jS0UG57uCVg1OeSH9jN2GUf2S2O1SQLv+VdeMySRU5tQCccal7qWOSVOHFPvbZV7tFme8wcK0k6fOgHrntonCvXjq4YVPUujOS9j80IyzL7W12SEvaZtm4WFJbTi0AZ8z4lh6Wyq95PderzpljJa++kvrtuNpuKnf+8LdTax6WpEGXs9z2mKw/r+to3y6pcBtOLQBnhG++pFG5/2rwnf0kRUwIly50Pn+NXVmCFi72lg7vybK9RQkpek7mbe9KeqAQpxaAM8J3QFJDh/XkXY63JnZOlB4fqLF1pd19rr23tk9Jismy0fsxSW9dcdy0dZqkLpxZAE4JX3FJ5xzWP41zvLXfTuneyVKhOcWkn0Zfe3cFJGX7etF3C0mxrztsONc3TWrYgzMLwCnhC5T1m0Jt1rzveOMXM6WCc+6QVPNbSW+syP74ruH7HdaSlkvK9i17Vd+U9ElG+c49uFGyfOXBmQXgtPBF2Mv3S2iiw21rB0r6PECS1G2AlNr9SNaHz/9xku9z8elr57vESPdm/9KCwfdKGvVYlCTJmNNkraR+jTmxAHJnMW7fvq+0XiVZOrxy351ph//+zPrRFZ0oI0mnGhySen5vu+PlZhulZn94S1KapxS4XpJ890gqULtWLb+K/x7/c84FSeP6Z/9T9rXdJ8nSNuBOn9g/t0tSxxk+1zy2jA9NL3PDk6Zo5x9EbVc4CNc4Chc5CEO4brfzc3zePzY8LOOXXy
SPNEmqETjddkvaE4ekOuPS71hgToME/T3os8wPfzg2TbocFZWxZWgO3VO1VQ9sl4zFi9M3hI/35LwCcNKlrspG2iZWaZL05Dr7dw28u0Qq+mPGLwmqOtkifT4z86M/Wf1YpoJV/OTdHP+U8itfdvh1Q5W/+o7uAXBe+FRn6R/tKnlIUrkXVk2zd+/Ie5Im+Dnc8bFBkv6TKslS0L6xya8HPulk/bYBr0qtp+9/JZc/peSnB/7rV0CSigdNjn0+v5yamDFdm/Qd/y/PUeCWs9z+FwYux50pVf7Gvx/v3OkzaXeXv2ag0w4lXL6nzHXv1fVf4/ux9wVJqjarUfaTxgtbLnUUvMbnfv4H38dXoOZNPbx48Xuua+papUq+OjGTw43QLrXWTN/WbON9PE0BtwsfbsClt4wuszzU8qUOSwesZDiAW4tP+rqmsUcbTvWQVHic519/MBwA4TOB5JEaZH2X597OmsR4AITPBPaeLtnRtviq5vKqNXBr8RrfzUmbflLSw77Sgd8XHQ8YUjX9hsRJm6NiqtQP6G37vb/rVkuydKpsndDNSJB059O57nafHk3/6ZOGXomnyjDQAOFzHTt6SlIx3+Rho1OltZN/amfd/kd4nKTo6OkfjbNO3cKjJSne+i00q6xfw9X83tzDVy190fuemHjCB3Cp60LqjIuIuF+KDxqZ2nbCXyGX+lo3vx8aV3vKtqSdMwJPdBogSRr7liwRH79ovbn52Iha6jil2lVmfBm/NMRXhxlngBmfK/3D0V+6tHLj0KNlJrWXlkRYL2xXDtOQ4T6Sn1/XUcO+atlNUquQbxJeKGZ7lM8ALd75WvPcd3tO5e3L1ZTEOAOEz+V8o8B55SR5vS1JSuydNnCk9RbPN72GPN+ivCRL0ML1oVLyq2l9G0lpG7ysX049NnPVWgamX+HaNxXQFUYYIHyup9M0x1/yMWt/nQ/sKwPn/zVhqCQFL1wdKi3+WpcnSjvPN7Q+4rXMVRsWyGACt/9SjSG4Bfr8mOmXG23WyxnfCOgxSFskScFaI2mWNDdZWpf+ZdIeuZyPjIngZcYXYMbngppm/l0gW9TAYS3AFr5Az7WGJWmepUL8og4Z4TuRkumhti9zKK5Y+6YjKskIA4TP1R1UVYe1CjotSSrmv3W336LzTcNemuEQvuI57qGSdtuX41WNIQW41HV1vtrpsBab/tGUYK3WbHXv6jH/woXtpatfbQ+VZf8NwsY+j6oMKUD4XF2A1jusbVBAevjWXJrv0bVcy4vzNqU2ueoeqmvLRdvi8hMVCjCkAOFzdY018qR95cI7CrSH7/fEFuUVppnrsv+izMzprH72W9vid2rOiAKEz+V1Dj7xnH1lcGwN2+9IqlEqerzCpM5eiyKvET710hjrux5Rcz2HMaIA4XN5nt8X/vkh628EPh32tefU9N+FFGxEenaWSrW5vMzjGr/698UyB18xJB1qn9TTlxEFbi3e1b05w2KlTfp4niR51xtq21p9anikf9+AGnFRE48X/Mz+cl7wArUuIylskWoVv/p+S4zp9eWGDrVWzT5S9h1GGSB8riT5o0uStNP6Lu7m9PCpU+NnFo+RJDWZnPHb5IKl7pL0uE9y0LX23LNk7/XrJTWaW5FhBgifK/GJOePwAWSHb5mqGPn35qgjpQPqt3T4Lb/NNqb5S9IdG5OuXbNHdkSuP1UtuG1BRhm41Swm/XZf1//1klc7afxORZc6Cn69pPvhzQ0AhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhA0D4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AhA8ACB8AED4AIHwA4IYshjn/u0PtS8vy9sAVrnD0ITxxkY3BEDDjA4BceTEEedPSFabpinb+QdR2hYNwjaNwkYMAMz4AIHwAQPgAED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfAB
A+ACB8AED4AIDwAQDhAwDCBwCEDwDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAED64rJjR+xgE4DbwYghc1KZf5u6QbzUGAiB8JhH3yS8HGQWAS11TGfX5wbJ9F/ZhIABmfObxYpXmwR6az0AAhM88atViDAAudQGAGZ+LSJt+UtLDvtKB3xcdDxhSNf2GxEmbo2Kq1A/oXdq6vm61JEunypKk5BkJku58mvEDCJ8b2tFTkor5Jg8bnSqtnfxTO+v2P8LjJEVHT/9oXEdJUni0JMWPliStsr5t0fxeBhDgUtf91BkXEXG/FB80MrXthL9CLvW1bn4/NK72lG1JO2cEnug0QJI09i1ZIj5+0Xpz87ERtdRxCh/SA5jxueU/HP2lSys3Dj1aZlJ7aUmE9cJ25TANGe4j+fl1HTXsq5bdJLUK+SbhhWK2R/kM0OKdrzVn+ADC576+UeC8cpK83pYkJfZOGzjSeovnm15Dnm9RXpIlaOH6UCn51bS+jaS0DV4NrTPBpEx7ahnIaAKEzz10mlbIYW3W/jof2FcGzv9rwlBJCl64OlRa/LUuT5R2nm9ofcRrVzLtaBjhA/4Hl2oMwS3Q50fH7mmzXvbJGOFB2iJJCtYaSbOkucnSOgXleAI4HwAzPjfR1JJpdYsaOKwF2MIX6LnWsCTNs1SIX9QhI3wnUjI9tCiDCRA+t3RQVR3WKui0JKmY/9bdfovONw17aYZD+IozXACXuvmBr3Y6rMWqomzXuqs1W927esy/cGF76eqME0D48pEArXdY26CA9PCtuTTfo2u5lhfnbUptwjABhC8/aayRJ+0rF95RoD18vye2KK8wzbRf6QIgfPlD5+ATz9lXBsfW6G9dqlEqerzCpM5eiyIJH0D48hfP7wv//FC8JOl02NeeUwvbtgcbkZ6dpVJtLi/zaMwwAU7Du7o3Z1istEkfz5Mk73pDbVurTw2P9O8bUCMuauLxgp/ZX84LXqDWZSSFLVKtq7+bG/lNiqSNGjpBUqnhlRhogPC5jOSPLknSTuu7uJvTw6dOjZ9ZPEaS1GSyn/3ewVJ3SXrcJ/kaV7oL5lr/PypKkroRPoDwuQ6fmDMOH0B2+JapipF/b446UjqgfkvPjI3NNqb5S9IdG5MqXn2/nwy4lLFSnG+vAgifK6lQIbdbmjXLtqlAQ9uC/zVPix9DC9w+vLkBgPABAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfAAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHgPABAOEDAMIHAPmBxTDnf3eofWlZ3h64whWOPoQnLrIxGAJmfACQKy+GIG9ausI0XdHOP4jarnAQrnEULnIQYMYHAIQPAAgfAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfALPzYghcVPIvC/frvoAnijAUAOEzib3tYiSt0ohvWzMYAJe6prCxaUzVL/5eM77O/jZzGA2A8JnBvpATz+75T9OgfptfNfrFMR4A4TOB9y50HOctSQXGPPTvB4wHQPhMYLlluMW6ZInQGsYDuLV4c8Ml9Tf80xf9vXemcJYAwudC0qaflPSwr3Tg90XHA4ZUTb8hcdLmqJgq9QN6l7aur1stydKpsiQpeUaCpDufznW3b2Usnr1SmpMEED5XsqOnJBXzTR42OlVaO/mndtbtf4THSYqOnv7RuI6SpPBoSYofLUla1UeS1Pze6/gTRiuAYQZuLV7juzl1xkVE3C/FB41MbTvhr5BLfa2b3w+Nqz1lW9LOGYEnOg2QJI19S5aIj1+03tx8bEQtdZxS7Tr+gA2j9X8MM3BrWQxz/neH2peW3f
S+3nm3/7yjZSa1l1IiSj8nSStbGYOH+0hS6qhhV2Z1kySjTMLZYg4HsPyv5tex79hmJ/qNz3bSFO38EaztCgfhGkfhIgdh0r/KXOo6zzcKnFdOktfbkqTE3mkDR1pv8XzTa8jzLcpLsgQtXB8qJb+a1reRlLbBq6F1JpiUaU8tAzOt7mt1otVYxhcgfK6o07RCDmuz9tfJ+OzdwPl/TRgqScELV4dKi7/W5YnSzvMNrY947UqmHQ3LFL6Y0PjgXwswvMAtxmt8t0KfHx27p8162SdjhAdpiyQpWGskzZLmJkvrFJTjCci0+k+LQ0GLijG8ADM+V9TUkml1ixo4rAXYwhfoudawJM2zVIhf1CEjfCdSMj20qMPy2ocTWiykewDhcwsHVdVhrYJOS5KK+W/d7bfofNOwl2Y4hK94rntZ9nhim18LM5oAl7puwVc7HdZiVVG2a93Vmq3uXT3mX7iwvXT1a+zk1/aJD8+newDhcxMBWu+wtiH9E8jBWnNpvkfXci0vztuU2uQa+5jWJbnj3IKMJUD43ERjjTxpX7nwjgLt4fs9sUV5hWmm/Uo3N1/1Suk1m/dzAcLnNjoHn3jOvjI4tkZ/61KNUtHjFSZ19loUeY3wjRhgDJ5se/01ficjChA+l+f5feGfH4qXJJ0O+9pzavordcFGpGdnqVSby8s8Gl9tB2Pesowelf5OcadGKQwpcEuZ5V3dQ31y+4Ge0ExrlkmV8rTfYbHSJn08T5K86w21ba0+NTzSv29AjbioiccLfmZ/OS94gVqXkRS2SLWKX22vUdKKFfaVK0lFeaIChC/vKoUuva77heate8kfXZKkndar0c3p4VOnxs8sHiNJajLZz37vYKm7JD3uk3yNl/hkzHdc4XkKEL4b8eSy68mH5am87dUn5ozDdajDt0xVjPx7c9SR0gH1W3pmbGy2Mc1fku7YmFTxqrsd+Wqmc8SHmAHCd0Mqt76er2FpXSmPu61QIbdbmjXLtqlAQ9uC/zX2evfdPDOB28g8b248Zbn1Ez4AhM/Fp3zXMeGrzDMCIHzmmvIx4QMIn+mmfEz4AMJntikfEz6A8JluyseEDyB8ZpvyMeEDCJ/ppnxM+ADCZ7YpHxM+gPCZbsrHhA8gfGab8jHhAwif6aZ8TPgAwme2KR8TPoDwmW7Kx4QPIHxmm/Ix4QMIX36e8rXKaWsrJnwA4cvHeuYw5bP05JkAED6TTfmY8AGEz2xTPiZ8AOEz3ZSPCR9A+Mw25WPCBxA+0035mPABhM9sUz4mfADhM92UjwkfQPjMNuVjwgcQPtNN+ZjwAYTPbFM+JnwA4TPdlI8JH0D4zDblY8IHED7TTfmY8AGEz2xTPiZ8AOEz05QvRJJCmPABhM9EelkkSy+eAQDhM9mUjwkfQPjMNuVjwgeYk8Uw73/7+3o77w9a4QpHHsITF9kYDMH18zLxfzvzPYAZHwCYgwdDAIDwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCB4DwAQDhAwDCBwCEDwAIHwAQPgAgfABA+ACA8AEA4QMAwgcAhA8ACB8AED4AIHwACB8AED4AIHwAQPgAgPABAOEDAMIHAIQPAAgfABA+ACB8AED4AIDwAQDhAwDCB4DwAQDhAwDCBwCEDwAIHwAQPgAgfADgEv4fNS7SXkUzLQcAAAAASUVORK5CYII="}}]); \ No newline at end of file diff --git a/assets/js/935f2afb.4d4bf693.js b/assets/js/935f2afb.4d4bf693.js new file mode 100644 index 000000000..2b13f98f0 --- /dev/null +++ b/assets/js/935f2afb.4d4bf693.js @@ -0,0 +1 
@@ +"use strict";(self.webpackChunkuc_2_docs=self.webpackChunkuc_2_docs||[]).push([[53],{1109:e=>{e.exports=JSON.parse('{"pluginId":"default","version":"current","label":"Next","banner":null,"badge":false,"noIndex":false,"className":"docs-version-current","isLast":true,"docsSidebars":{"tutorialSidebar":[{"type":"category","label":"Educational Kits","collapsible":true,"collapsed":true,"items":[{"type":"category","label":"DiscoveryCore","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Optics and Imaging for Medical Photonics Students","href":"/docs/Toolboxes/DiscoveryCore/Opticsintro","docId":"Toolboxes/DiscoveryCore/Opticsintro"},{"type":"link","label":"openUC2 Smartphone Microscope with a finite corrected objective lens","href":"/docs/Toolboxes/DiscoveryCore/Smartphone Microscope","docId":"Toolboxes/DiscoveryCore/Smartphone Microscope"},{"type":"category","label":"CHINESE","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"uc2miniboxCN","href":"/docs/Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCN","docId":"Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCN"}]},{"type":"category","label":"ENGLISH","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"UC2 miniBOX (english)","href":"/docs/Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxEN","docId":"Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxEN"},{"type":"link","label":"Lens","href":"/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreLens","docId":"Toolboxes/DiscoveryCore/ENGLISH/CoreLens"},{"type":"link","label":"Telescope","href":"/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreTelescope","docId":"Toolboxes/DiscoveryCore/ENGLISH/CoreTelescope"},{"type":"link","label":"Microscope","href":"/docs/Toolboxes/DiscoveryCore/ENGLISH/coreMicroscope","docId":"Toolboxes/DiscoveryCore/ENGLISH/coreMicroscope"}]},{"type":"category","label":"FRENCH","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"UC2 miniBOX 
(fran\xe7ais)","href":"/docs/Toolboxes/DiscoveryCore/FRENCH/uc2miniboxFR","docId":"Toolboxes/DiscoveryCore/FRENCH/uc2miniboxFR"},{"type":"link","label":"Lentille","href":"/docs/Toolboxes/DiscoveryCore/FRENCH/CoreLensFR","docId":"Toolboxes/DiscoveryCore/FRENCH/CoreLensFR"},{"type":"link","label":"T\xe9lescope","href":"/docs/Toolboxes/DiscoveryCore/FRENCH/CoreTelescopeFR","docId":"Toolboxes/DiscoveryCore/FRENCH/CoreTelescopeFR"},{"type":"link","label":"Microscope","href":"/docs/Toolboxes/DiscoveryCore/FRENCH/coreMicroscopeFR","docId":"Toolboxes/DiscoveryCore/FRENCH/coreMicroscopeFR"}]},{"type":"category","label":"GERMAN","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"UC2 miniBOX (deutsch)","href":"/docs/Toolboxes/DiscoveryCore/GERMAN/uc2miniboxDE","docId":"Toolboxes/DiscoveryCore/GERMAN/uc2miniboxDE"},{"type":"link","label":"Linse","href":"/docs/Toolboxes/DiscoveryCore/GERMAN/CoreLens","docId":"Toolboxes/DiscoveryCore/GERMAN/CoreLens"},{"type":"link","label":"Teleskop","href":"/docs/Toolboxes/DiscoveryCore/GERMAN/CoreTelescope","docId":"Toolboxes/DiscoveryCore/GERMAN/CoreTelescope"},{"type":"link","label":"Mikroskop","href":"/docs/Toolboxes/DiscoveryCore/GERMAN/coreMicroscope","docId":"Toolboxes/DiscoveryCore/GERMAN/coreMicroscope"}]},{"type":"category","label":"SPANISH","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"core_intro","href":"/docs/Toolboxes/DiscoveryCore/SPANISH/core_intro","docId":"Toolboxes/DiscoveryCore/SPANISH/core_intro"}]}]},{"type":"category","label":"DiscoveryElectronics","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"openUC2 Electronics kit that enables automation","href":"/docs/Toolboxes/DiscoveryElectronics/Automation_intro","docId":"Toolboxes/DiscoveryElectronics/Automation_intro"},{"type":"link","label":"openUC2 Camera Setup","href":"/docs/Toolboxes/DiscoveryElectronics/Camera Setup","docId":"Toolboxes/DiscoveryElectronics/Camera Setup"},{"type":"link","label":"XYZ Micrometer 
Stage for Precise Motion Control","href":"/docs/Toolboxes/DiscoveryElectronics/XYZ_stage_mico","docId":"Toolboxes/DiscoveryElectronics/XYZ_stage_mico"},{"type":"link","label":"ESP32 XIAO Sense-based microscope","href":"/docs/Toolboxes/DiscoveryElectronics/seeedmicroscope","docId":"Toolboxes/DiscoveryElectronics/seeedmicroscope"},{"type":"link","label":"openUC2 *Spectrometer*","href":"/docs/Toolboxes/DiscoveryElectronics/spectrometer","docId":"Toolboxes/DiscoveryElectronics/spectrometer"}]},{"type":"category","label":"DiscoveryInterferometer","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"openUC2 Interferometer Introduction","href":"/docs/Toolboxes/DiscoveryInterferometer/Interferometer_intro","docId":"Toolboxes/DiscoveryInterferometer/Interferometer_intro"},{"type":"link","label":"openUC2 In-line holography","href":"/docs/Toolboxes/DiscoveryInterferometer/InlineHolography","docId":"Toolboxes/DiscoveryInterferometer/InlineHolography"},{"type":"link","label":"openUC2 Michelson Interferometer","href":"/docs/Toolboxes/DiscoveryInterferometer/MichelsonInterferometer","docId":"Toolboxes/DiscoveryInterferometer/MichelsonInterferometer"},{"type":"link","label":"openUC2 Mach-Zender Interferometer","href":"/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometer","docId":"Toolboxes/DiscoveryInterferometer/MachZenderInterferometer"},{"type":"link","label":"HIK-Camera Software Installation","href":"/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial","docId":"Toolboxes/DiscoveryInterferometer/SoftwareTutorial"}]},{"type":"category","label":"Building The CourseBOX","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"The Course BOX Alignment Procedure (Finite Optics)","href":"/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/","docId":"Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/Readme"},{"type":"link","label":"CourseBOX: Light Microscopy and Optical Alignment (Infinity 
Optics)","href":"/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_InfOptics/","docId":"Toolboxes/DiscoveryDiffraction/ALIGNMENT_InfOptics/Readme"},{"type":"link","label":"MicroscopyCore","href":"/docs/Toolboxes/DiscoveryDiffraction/MicroscopyCore","docId":"Toolboxes/DiscoveryDiffraction/MicroscopyCore"}],"href":"/docs/Toolboxes/DiscoveryDiffraction/"},{"type":"category","label":"Polarisation Experiments","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Brewster Angle Experiment","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Brewster_Angle_Experiment/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Brewster_Angle_Experiment/Readme"},{"type":"link","label":"Circular Polarizer","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Circular_Polarizer/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Circular_Polarizer/Readme"},{"type":"link","label":"Crossed Polarizers","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Crossed_Polarizers/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Crossed_Polarizers/Readme"},{"type":"link","label":"Many Microscope Slides Experiment","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Many_Microscope_Slides_Experiment/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Many_Microscope_Slides_Experiment/Readme"},{"type":"link","label":"Newton\'s Rings Experiment","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Newtons_Rings_Experiment/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Newtons_Rings_Experiment/Readme"},{"type":"link","label":"Polarization Experiment using Optically Active Solution","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Polarization_using_optically_active_solution/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Polarization_using_optically_active_solution/Readme"},{"type":"link","label":"Stress Birefringence 
Experiment","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Stress_Birefringence/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Stress_Birefringence/Readme"},{"type":"link","label":"Three Polarizers (0, 45, 90 degrees)","href":"/docs/Toolboxes/DiscoveryPolarization/APP_POL_Three_Polarizers/","docId":"Toolboxes/DiscoveryPolarization/APP_POL_Three_Polarizers/Readme"}],"href":"/docs/Toolboxes/DiscoveryPolarization/"},{"type":"link","label":"Fluorescence Extension","href":"/docs/Toolboxes/DiscoveryFluorescence/","docId":"Toolboxes/DiscoveryFluorescence/README"},{"type":"category","label":"Phase Microscopy","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Differential Phase Contrast Microscopy","href":"/docs/Toolboxes/DiscoveryPhaseMicroscopy/DPCmicroscopy","docId":"Toolboxes/DiscoveryPhaseMicroscopy/DPCmicroscopy"}],"href":"/docs/Toolboxes/DiscoveryPhaseMicroscopy/"}],"href":"/docs/Toolboxes/"},{"type":"category","label":"Investigator","collapsible":true,"collapsed":true,"items":[{"type":"category","label":"ZMicroscope","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Unpack the openUC2 Z-Microscope","href":"/docs/Investigator/ZMicroscope/UpackZMicroscope","docId":"Investigator/ZMicroscope/UpackZMicroscope"}]},{"type":"category","label":"XYZMicroscope","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Aligning the Beamsplitter Cube","href":"/docs/Investigator/XYZMicroscope/AlignLaser","docId":"Investigator/XYZMicroscope/AlignLaser"},{"type":"link","label":"openUC2 FiveD v1","href":"/docs/Investigator/XYZMicroscope/FiveD_v1","docId":"Investigator/XYZMicroscope/FiveD_v1"},{"type":"link","label":"openUC2 FiveD v2","href":"/docs/Investigator/XYZMicroscope/FiveD_v2","docId":"Investigator/XYZMicroscope/FiveD_v2"},{"type":"link","label":"openUC2 FiveD v3","href":"/docs/Investigator/XYZMicroscope/FiveD_v3","docId":"Investigator/XYZMicroscope/FiveD_v3"},{"type":"link","label":"Histo Scanner Plugin 
Documentation","href":"/docs/Investigator/XYZMicroscope/HistoScan","docId":"Investigator/XYZMicroscope/HistoScan"},{"type":"link","label":"MCT (Multi-Colour Timelapse) Imaging Plugin","href":"/docs/Investigator/XYZMicroscope/MCTPlugin","docId":"Investigator/XYZMicroscope/MCTPlugin"},{"type":"link","label":"ROI Scanner","href":"/docs/Investigator/XYZMicroscope/ROIScanner","docId":"Investigator/XYZMicroscope/ROIScanner"},{"type":"link","label":"openUC2 Phase-Contrast Setup Tutorial","href":"/docs/Investigator/XYZMicroscope/SetupPhasecontrast","docId":"Investigator/XYZMicroscope/SetupPhasecontrast"},{"type":"link","label":"openUC2 Setting up the tube lens","href":"/docs/Investigator/XYZMicroscope/SetupTubelens","docId":"Investigator/XYZMicroscope/SetupTubelens"},{"type":"link","label":"Smart Microscopy Using openUC2 and ImSwitch","href":"/docs/Investigator/XYZMicroscope/SmartMicroscopy","docId":"Investigator/XYZMicroscope/SmartMicroscopy"},{"type":"link","label":"Stage Mapping and Stage Calibration","href":"/docs/Investigator/XYZMicroscope/StageCalibration","docId":"Investigator/XYZMicroscope/StageCalibration"},{"type":"link","label":"Stage Scanning and Image Stitching (ASHLAR)","href":"/docs/Investigator/XYZMicroscope/StageScanning","docId":"Investigator/XYZMicroscope/StageScanning"}]},{"type":"category","label":"Lightsheet","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"openUC2 Light-Sheet Microscope","href":"/docs/Investigator/Lightsheet/LightSheet","docId":"Investigator/Lightsheet/LightSheet"},{"type":"link","label":"openUC2 Light-Sheet Microscope (Old Version)","href":"/docs/Investigator/Lightsheet/LightSheetOld","docId":"Investigator/Lightsheet/LightSheetOld"},{"type":"link","label":"openUC2 Light-Sheet Tips and Tricks","href":"/docs/Investigator/Lightsheet/LightSheet Sample","docId":"Investigator/Lightsheet/LightSheet Sample"},{"type":"link","label":"Light-sheet 
alignment","href":"/docs/Investigator/Lightsheet/LightsheetCalibration","docId":"Investigator/Lightsheet/LightsheetCalibration"}]},{"type":"category","label":"STORM","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"U.C.*STORM*","href":"/docs/Investigator/STORM/Main","docId":"Investigator/STORM/Main"},{"type":"link","label":"Setting up the laser","href":"/docs/Investigator/STORM/Illumination","docId":"Investigator/STORM/Illumination"},{"type":"link","label":"Stability","href":"/docs/Investigator/STORM/Stability","docId":"Investigator/STORM/Stability"},{"type":"link","label":"Software","href":"/docs/Investigator/STORM/Software","docId":"Investigator/STORM/Software"},{"type":"link","label":"Electronics","href":"/docs/Investigator/STORM/Electronics","docId":"Investigator/STORM/Electronics"},{"type":"link","label":"Results","href":"/docs/Investigator/STORM/Results","docId":"Investigator/STORM/Results"}]},{"type":"link","label":"README","href":"/docs/Investigator/FlowStopper/","docId":"Investigator/FlowStopper/README"}]},{"type":"category","label":"Electronics","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Introduction","href":"/docs/Electronics/uc2e1","docId":"Electronics/uc2e1"},{"type":"link","label":"Hardware","href":"/docs/Electronics/uc2e2","docId":"Electronics/uc2e2"},{"type":"link","label":"Getting Started","href":"/docs/Electronics/uc2e3","docId":"Electronics/uc2e3"},{"type":"link","label":"REST principle","href":"/docs/Electronics/uc2e5","docId":"Electronics/uc2e5"},{"type":"link","label":"REST commands","href":"/docs/Electronics/uc2e5.1","docId":"Electronics/uc2e5.1"},{"type":"link","label":"Connecting devices","href":"/docs/Electronics/uc2e6","docId":"Electronics/uc2e6"},{"type":"link","label":"Controlling the UC2e","href":"/docs/Electronics/uc2e7","docId":"Electronics/uc2e7"},{"type":"link","label":"Compiling from Scratch","href":"/docs/Electronics/uc2e8","docId":"Electronics/uc2e8"},{"type":"link","label":"Replace 
Hardware","href":"/docs/Electronics/uc2e9","docId":"Electronics/uc2e9"},{"type":"link","label":"PS4-Controller","href":"/docs/Electronics/PS4-Controller","docId":"Electronics/PS4-Controller"},{"type":"link","label":"Python commands","href":"/docs/Electronics/uc2e5.2","docId":"Electronics/uc2e5.2"},{"type":"category","label":"APIDescription","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"UC2-ESP","href":"/docs/Electronics/APIDescription/INTRO","docId":"Electronics/APIDescription/INTRO"},{"type":"link","label":"AS 5311 linear encoder for real-time feedback loop","href":"/docs/Electronics/APIDescription/Encoder","docId":"Electronics/APIDescription/Encoder"},{"type":"link","label":"Home","href":"/docs/Electronics/APIDescription/Home","docId":"Electronics/APIDescription/Home"},{"type":"link","label":"LED array","href":"/docs/Electronics/APIDescription/LEDArray","docId":"Electronics/APIDescription/LEDArray"},{"type":"link","label":"Motor","href":"/docs/Electronics/APIDescription/Motor","docId":"Electronics/APIDescription/Motor"},{"type":"link","label":"PinConfig","href":"/docs/Electronics/APIDescription/PinConfig","docId":"Electronics/APIDescription/PinConfig"}]},{"type":"category","label":"UC2-ESP","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"UC2-ESP Firmware for the openUC2 UC2e electronics","href":"/docs/Electronics/UC2-ESP/Setup_Buildenvironment","docId":"Electronics/UC2-ESP/Setup_Buildenvironment"}]},{"type":"category","label":"UC2-REST","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"UC2-REST","href":"/docs/Electronics/UC2-REST/INTRO","docId":"Electronics/UC2-REST/INTRO"},{"type":"link","label":"UC2-REST: Messaging","href":"/docs/Electronics/UC2-REST/ESP32_Messaging_Callback","docId":"Electronics/UC2-REST/ESP32_Messaging_Callback"},{"type":"link","label":"UC2-REST: 
Motor","href":"/docs/Electronics/UC2-REST/ESP32_Motor","docId":"Electronics/UC2-REST/ESP32_Motor"}]}]},{"type":"category","label":"ImSwitch","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Install driver for Daheng Camera","href":"/docs/ImSwitch/DahengCamera","docId":"ImSwitch/DahengCamera"},{"type":"link","label":"ImSwitchClient Documentation","href":"/docs/ImSwitch/ImSwitchClient","docId":"ImSwitch/ImSwitchClient"},{"type":"link","label":"ImSwitchConfig","href":"/docs/ImSwitch/ImSwitchConfig","docId":"ImSwitch/ImSwitchConfig"},{"type":"link","label":"ImSwitch Experimental Features Documentation","href":"/docs/ImSwitch/ImSwitchExperimental","docId":"ImSwitch/ImSwitchExperimental"},{"type":"link","label":"Install ImSwitch","href":"/docs/ImSwitch/ImSwitchInstall","docId":"ImSwitch/ImSwitchInstall"},{"type":"link","label":"ImSwitchInstallUbuntu","href":"/docs/ImSwitch/ImSwitchInstallUbuntu","docId":"ImSwitch/ImSwitchInstallUbuntu"},{"type":"link","label":"ImSwitchInstallWindows","href":"/docs/ImSwitch/ImSwitchInstallWindows","docId":"ImSwitch/ImSwitchInstallWindows"},{"type":"link","label":"Install ImSwitch using the ImSwitch Installer (Electron updated Version)","href":"/docs/ImSwitch/ImSwitchInstaller","docId":"ImSwitch/ImSwitchInstaller"},{"type":"link","label":"Install ImSwitch using the ImSwitch Installer (CONDA INSTALLER OUTDATED)","href":"/docs/ImSwitch/ImSwitchInstallerConda","docId":"ImSwitch/ImSwitchInstallerConda"},{"type":"link","label":"ImSwitchUpdate","href":"/docs/ImSwitch/ImSwitchUpdate","docId":"ImSwitch/ImSwitchUpdate"}]},{"type":"category","label":"openUC2 Workshops","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"openUC2 Workshop at BioRTC in Nigeria","href":"/docs/WORKSHOPS/Workshop Nigeria","docId":"WORKSHOPS/Workshop 
Nigeria"}],"href":"/docs/WORKSHOPS/"},{"type":"category","label":"PRODUCTION","collapsible":true,"collapsed":true,"items":[{"type":"category","label":"INVESTIGATOR","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Assemble the XYZ Microscope","href":"/docs/PRODUCTION/INVESTIGATOR/ProductionXYZMicroscope","docId":"PRODUCTION/INVESTIGATOR/ProductionXYZMicroscope"}]},{"type":"category","label":"Modules","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"USB Camera","href":"/docs/PRODUCTION/Modules/Camera","docId":"PRODUCTION/Modules/Camera"},{"type":"link","label":"Eyepiece","href":"/docs/PRODUCTION/Modules/Eyepiece","docId":"PRODUCTION/Modules/Eyepiece"},{"type":"link","label":"LENS","href":"/docs/PRODUCTION/Modules/LENS","docId":"PRODUCTION/Modules/LENS"},{"type":"link","label":"KINEMATIC MIRROR (45\xb0)","href":"/docs/PRODUCTION/Modules/KIN_MIR_45","docId":"PRODUCTION/Modules/KIN_MIR_45"},{"type":"link","label":"KINEMATIC MIRROR (90\xb0)","href":"/docs/PRODUCTION/Modules/KIN_MIR_90","docId":"PRODUCTION/Modules/KIN_MIR_90"},{"type":"link","label":"KINEMATIC MIRROR (90\xb0)","href":"/docs/PRODUCTION/Modules/BEAMSPLITTER","docId":"PRODUCTION/Modules/BEAMSPLITTER"},{"type":"link","label":"Mirror (45\xb0)","href":"/docs/PRODUCTION/Modules/MIR_45","docId":"PRODUCTION/Modules/MIR_45"},{"type":"link","label":"Kinematic XY Mount","href":"/docs/PRODUCTION/Modules/KIN_XY_LASER","docId":"PRODUCTION/Modules/KIN_XY_LASER"},{"type":"link","label":"Kinematic XY Mount / Laser","href":"/docs/PRODUCTION/Modules/STAGE_Z_MANUAL","docId":"PRODUCTION/Modules/STAGE_Z_MANUAL"},{"type":"link","label":"Z-Stage Motorized NEMA12 25mm","href":"/docs/PRODUCTION/Modules/STAGE_Z_NEMA","docId":"PRODUCTION/Modules/STAGE_Z_NEMA"},{"type":"link","label":"Torch","href":"/docs/PRODUCTION/Modules/TORCH","docId":"PRODUCTION/Modules/TORCH"},{"type":"link","label":"Sample 
Holder","href":"/docs/PRODUCTION/Modules/SAMPLE_HOLDEr","docId":"PRODUCTION/Modules/SAMPLE_HOLDEr"},{"type":"link","label":"Polarization Rotator","href":"/docs/PRODUCTION/Modules/POLARIZER_ROTATING","docId":"PRODUCTION/Modules/POLARIZER_ROTATING"},{"type":"link","label":"Apertures","href":"/docs/PRODUCTION/Modules/APERTURES","docId":"PRODUCTION/Modules/APERTURES"}]}]},{"type":"link","label":"openUC2 Documentation","href":"/docs/intro","docId":"intro"}]},"docs":{"Electronics/APIDescription/Encoder":{"id":"Electronics/APIDescription/Encoder","title":"AS 5311 linear encoder for real-time feedback loop","description":"The relevant code can be found here:","sidebar":"tutorialSidebar"},"Electronics/APIDescription/Home":{"id":"Electronics/APIDescription/Home","title":"Home","description":"UC2-ESP Motor Homing Interface API Description","sidebar":"tutorialSidebar"},"Electronics/APIDescription/INTRO":{"id":"Electronics/APIDescription/INTRO","title":"UC2-ESP","description":"This is the API description for the UC2 firmware running on the ESP32 boards. It\'s under heavy active development. You can find the current version here:","sidebar":"tutorialSidebar"},"Electronics/APIDescription/LEDArray":{"id":"Electronics/APIDescription/LEDArray","title":"LED array","description":"This API provides a convenient method for controlling individual LEDs within a NeoPixel LED array using the UC2-ESP firmware. The interface facilitates the manipulation of LED colors and array display modes. It operates through JSON documents sent over USB serial communication.","sidebar":"tutorialSidebar"},"Electronics/APIDescription/Motor":{"id":"Electronics/APIDescription/Motor","title":"Motor","description":"This API provides a straightforward way to control and manage motors using the UC2-ESP firmware. The interface operates over USB serial communication and accepts JSON documents to control motor movements. 
The main endpoint for motor control is /motor_act.","sidebar":"tutorialSidebar"},"Electronics/APIDescription/PinConfig":{"id":"Electronics/APIDescription/PinConfig","title":"PinConfig","description":"UC2 System Version 2 and 3, and WEMOS Board Pinout Description","sidebar":"tutorialSidebar"},"Electronics/PS4-Controller":{"id":"Electronics/PS4-Controller","title":"PS4-Controller","description":"If you are using the webserial online flashing tool provided by UC2 (https://youseetoo.github.io/) to flash the firmware onto your ESP8266 or ESP32 development board, the process of connecting the PS4 controller to the UC2-ESP remains similar to the steps mentioned earlier. However, please note that the flashing tool is a separate tool for uploading firmware, and the Bluetooth communication with the PS4 controller needs to be implemented in your firmware code.","sidebar":"tutorialSidebar"},"Electronics/UC2-ESP/Setup_Buildenvironment":{"id":"Electronics/UC2-ESP/Setup_Buildenvironment","title":"UC2-ESP Firmware for the openUC2 UC2e electronics","description":"This refers to the UC2-ESP firmware that can be found here//github.com/youseetoo/uc2-esp32","sidebar":"tutorialSidebar"},"Electronics/UC2-REST/ESP32_Messaging_Callback":{"id":"Electronics/UC2-REST/ESP32_Messaging_Callback","title":"UC2-REST: Messaging","description":"This documentation covers the callback functionality integrated into the firmware, particularly focusing on the Message class. The Message class is designed to parse incoming messages from the ESP32, facilitating the conversion of hardware inputs and other events into software actions.","sidebar":"tutorialSidebar"},"Electronics/UC2-REST/ESP32_Motor":{"id":"Electronics/UC2-REST/ESP32_Motor","title":"UC2-REST: Motor","description":"This section provides detailed documentation on the Motor class designed for controlling motors via the firmware. 
The Motor class includes functionality for motor movement, triggering, position tracking, and stage scanning, among other features.","sidebar":"tutorialSidebar"},"Electronics/UC2-REST/INTRO":{"id":"Electronics/UC2-REST/INTRO","title":"UC2-REST","description":"This explains the basic functionality of the UC2-REST Python interface in conjunction with the UC2-ESP32 mainboard. This was mostly designed to interface with ImSwitch.","sidebar":"tutorialSidebar"},"Electronics/uc2e1":{"id":"Electronics/uc2e1","title":"Introduction","description":"Overview","sidebar":"tutorialSidebar"},"Electronics/uc2e2":{"id":"Electronics/uc2e2","title":"Hardware","description":"\ud83d\udd0c Board layout and schematics","sidebar":"tutorialSidebar"},"Electronics/uc2e3":{"id":"Electronics/uc2e3","title":"Getting Started","description":"First Steps, Getting Started, Flashing - Simply Quick Start!","sidebar":"tutorialSidebar"},"Electronics/uc2e5":{"id":"Electronics/uc2e5","title":"REST principle","description":"Introduction into the ESP32 microcontroller firmware","sidebar":"tutorialSidebar"},"Electronics/uc2e5.1":{"id":"Electronics/uc2e5.1","title":"REST commands","description":"Controlling hardware using the WebSerial Standard","sidebar":"tutorialSidebar"},"Electronics/uc2e5.2":{"id":"Electronics/uc2e5.2","title":"Python commands","description":"Using UC2-REST in Python","sidebar":"tutorialSidebar"},"Electronics/uc2e6":{"id":"Electronics/uc2e6","title":"Connecting devices","description":"Connect devices","sidebar":"tutorialSidebar"},"Electronics/uc2e7":{"id":"Electronics/uc2e7","title":"Controlling the UC2e","description":"Controlling the ESP32","sidebar":"tutorialSidebar"},"Electronics/uc2e8":{"id":"Electronics/uc2e8","title":"Compiling from Scratch","description":"UC2-ESP Firmware for the openUC2 UC2e electronics","sidebar":"tutorialSidebar"},"Electronics/uc2e9":{"id":"Electronics/uc2e9","title":"Replace Hardware","description":"\u274c Replacing 
parts","sidebar":"tutorialSidebar"},"ImSwitch/DahengCamera":{"id":"ImSwitch/DahengCamera","title":"Install driver for Daheng Camera","description":"Windows","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchClient":{"id":"ImSwitch/ImSwitchClient","title":"ImSwitchClient Documentation","description":"ImSwitchClient is a Python package designed to connect to the ImSwitch REST API, enabling remote control of ImSwitchUC2 functionalities directly from Jupyter Notebooks. This client facilitates easy integration with the ImSwitch ecosystem, offering programmable access to various features like laser control, stage manipulation, and image acquisition.","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchConfig":{"id":"ImSwitch/ImSwitchConfig","title":"ImSwitchConfig","description":"ImSwitch Config File","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchExperimental":{"id":"ImSwitch/ImSwitchExperimental","title":"ImSwitch Experimental Features Documentation","description":"Overview","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchInstall":{"id":"ImSwitch/ImSwitchInstall","title":"Install ImSwitch","description":"What will you learn?","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchInstaller":{"id":"ImSwitch/ImSwitchInstaller","title":"Install ImSwitch using the ImSwitch Installer (Electron updated Version)","description":"This is a work-in-progress installer. Please have a look for updates or file an issue here https://github.com/openUC2/ImSwitchInstaller/issues","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchInstallerConda":{"id":"ImSwitch/ImSwitchInstallerConda","title":"Install ImSwitch using the ImSwitch Installer (CONDA INSTALLER OUTDATED)","description":"This tutorial is outdated! 
Please look for the Standalone Electron-based version!","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchInstallUbuntu":{"id":"ImSwitch/ImSwitchInstallUbuntu","title":"ImSwitchInstallUbuntu","description":"ImSwitch Installation Ubuntu","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchInstallWindows":{"id":"ImSwitch/ImSwitchInstallWindows","title":"ImSwitchInstallWindows","description":"ImSwitch Installation on Windows","sidebar":"tutorialSidebar"},"ImSwitch/ImSwitchUpdate":{"id":"ImSwitch/ImSwitchUpdate","title":"ImSwitchUpdate","description":"Updated openUC2 ImSwitch","sidebar":"tutorialSidebar"},"intro":{"id":"intro","title":"openUC2 Documentation","description":"Here you can find all information to enhance, repair, improve, use, communicate,.... our optical toolbox openUC2. Did not find what you were looking for? No problem. Send us a mail or write an issue in our github repository https://github.com/openUC2/UC2-GIT/issues.","sidebar":"tutorialSidebar"},"Investigator/FlowStopper/README":{"id":"Investigator/FlowStopper/README","title":"README","description":"Setup Wifi Access Point on the Raspi","sidebar":"tutorialSidebar"},"Investigator/Lightsheet/LightSheet":{"id":"Investigator/Lightsheet/LightSheet","title":"openUC2 Light-Sheet Microscope","description":"In this experiment, we will explore the concept of optical sectioning to improve the resolution along the optical axis and the XY plane. The Light-Sheet Microscope, also known as the Light-Sheet Microscopy or Lattice Light-Sheet Microscopy, is a powerful technique used to acquire volumetric images of samples, such as zebrafishes. 
This technique enables us to visualize biological specimens in three dimensions with high resolution and minimal phototoxicity.","sidebar":"tutorialSidebar"},"Investigator/Lightsheet/LightSheet Sample":{"id":"Investigator/Lightsheet/LightSheet Sample","title":"openUC2 Light-Sheet Tips and Tricks","description":"Introduction to the openUC2 Light-Sheet Microscope","sidebar":"tutorialSidebar"},"Investigator/Lightsheet/LightsheetCalibration":{"id":"Investigator/Lightsheet/LightsheetCalibration","title":"Light-sheet alignment","description":"This tutorial will show you how to find the light-sheet and align this w.r.t. the camera plane.","sidebar":"tutorialSidebar"},"Investigator/Lightsheet/LightSheetOld":{"id":"Investigator/Lightsheet/LightSheetOld","title":"openUC2 Light-Sheet Microscope (Old Version)","description":"This is the manual for the Light sheet Microscope.","sidebar":"tutorialSidebar"},"Investigator/STORM/Electronics":{"id":"Investigator/STORM/Electronics","title":"Electronics","description":"Here we make use of the ESP32 Wemos D1 R32 microcontroller board in combination with the CNC Shield v3. The wiring of the different components is straight forward as the Stepper Motors are attached to the stepper drivers and the Laser is triggered by the SpinEn pin. The NeoPixel LED mounts to the Hold pin.","sidebar":"tutorialSidebar"},"Investigator/STORM/Illumination":{"id":"Investigator/STORM/Illumination","title":"Setting up the laser","description":"Laser illumination","sidebar":"tutorialSidebar"},"Investigator/STORM/Main":{"id":"Investigator/STORM/Main","title":"U.C.*STORM*","description":"---","sidebar":"tutorialSidebar"},"Investigator/STORM/Results":{"id":"Investigator/STORM/Results","title":"Results","description":"Imaging with the UC2-STORM setup","sidebar":"tutorialSidebar"},"Investigator/STORM/Software":{"id":"Investigator/STORM/Software","title":"Software","description":"For the control and acquisition software, we use ImSwitch. 
This is an open-source software centered around Napari as a multi-layer viewer and a rich framework for QT-based widgets. We make use of the open-source localization framework \\"microEye\\" ()","sidebar":"tutorialSidebar"},"Investigator/STORM/Stability":{"id":"Investigator/STORM/Stability","title":"Stability","description":"Setup stability","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/AlignLaser":{"id":"Investigator/XYZMicroscope/AlignLaser","title":"Aligning the Beamsplitter Cube","description":"The new xyz microscope has a special 2x1 cube that holds the fluorescence optics. Inside the beamsplitter cube is mounted kinematically and can be adjusted with 3 set screws. It\'s important that the fiber coupled laser is focussed / reimaged in the back focal plane of the objective lens. Therefore, we have created a little tutorial to get you starting how this works.","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/FiveD_v1":{"id":"Investigator/XYZMicroscope/FiveD_v1","title":"openUC2 FiveD v1","description":"Unpacking the microscope","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/FiveD_v2":{"id":"Investigator/XYZMicroscope/FiveD_v2","title":"openUC2 FiveD v2","description":"Design Files","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/FiveD_v3":{"id":"Investigator/XYZMicroscope/FiveD_v3","title":"openUC2 FiveD v3","description":"Design Files","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/HistoScan":{"id":"Investigator/XYZMicroscope/HistoScan","title":"Histo Scanner Plugin Documentation","description":"Welcome to the documentation page for the Histo Scanner Plugin, a powerful tool for scanning large areas and stitching images onto a large canvas. 
This page provides detailed information on how to configure and use the plugin effectively.","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/MCTPlugin":{"id":"Investigator/XYZMicroscope/MCTPlugin","title":"MCT (Multi-Colour Timelapse) Imaging Plugin","description":"More information are coming soon","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/ROIScanner":{"id":"Investigator/XYZMicroscope/ROIScanner","title":"ROI Scanner","description":"Starting ImSwitch on Ubuntu and Start the ROI Scanner","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/SetupPhasecontrast":{"id":"Investigator/XYZMicroscope/SetupPhasecontrast","title":"openUC2 Phase-Contrast Setup Tutorial","description":"Introduction: Understanding Phase-Contrast Microscopy","sidebar":"tutorialSidebar"},"Investigator/XYZMicroscope/SetupTubelens":{"id":"Investigator/XYZMicroscope/SetupTubelens","title":"openUC2 Setting up the tube lens","description":" - - + + \ No newline at end of file diff --git a/docs/Electronics/uc2e7/index.html b/docs/Electronics/uc2e7/index.html index 90f02fe03..341743b35 100644 --- a/docs/Electronics/uc2e7/index.html +++ b/docs/Electronics/uc2e7/index.html @@ -10,15 +10,15 @@ - - + +

Controlling the UC2e

Controlling the ESP32

The unified "REST-API" (inspired by, though not following, the full protocol) enables you to control the functionalities from multiple different clients (e.g. Python, web browser, Android phone). The core idea is to file post/get requests (serial/wifi) that send/receive JSON files that do "something".

Installing the USB Serial Driver: Installing the CH340 USB serial driver is explained in more detail here: Sparkfun

🐍 Python Bindings

In order to interact with the electronics, we implemented a Python library called UC2-REST, available here that will help you to work with the device. The easiest way to install it would be:

pip install uc2-rest

It will automatically detect your UC2e (if the driver is installed), connect and will offer you the basic functionalities such as moving the motor, etc.

In order to give you a deep dive into what's possible, we provide a Jupyter Notebook that guides you through all the functionalities. You can find it here Start Jupyter Tutorial

📲 Android APP

This is coming soon. You will be able to control the electronics using the Wifi connection of your Android phone.

💻 Browser APP

If the ESP32 is offering an access point or is connected to your wifi router, you can access the webserver running on the ESP32 using a browser. It offers limited control over the endpoints by filing post and get requests.

More information is coming soon!

🎮 Playstation 3 or Playstation 4 Controller (coming soon)

With the open-source libraries PS3Controller and PS4Controller we are able to make use of the Bluetooth-able joysticks from your beloved game console.

When a PS4 controller is 'paired' to a PS4 console, it just means that it has stored the console's Bluetooth MAC address, which is the only device the controller will connect to. Usually, this pairing happens when you connect the controller to the PS4 console using a USB cable, and press the PS button. This initiates writing the console's MAC address to the controller.

Therefore, if you want to connect your PS4 controller to the ESP32, you either need to figure out what the Bluetooth MAC address of your PS4 console is and set the ESP32's address to it, or change the MAC address stored in the PS4 controller.

Whichever path you choose, you might want a tool to read and/or write the currently paired MAC address from the PS4 controller. You can try using sixaxispairer for this purpose.

If you opted to change the ESP32's MAC address, you'll need to include the MAC address in the PS4.begin() function within the setup() Arduino function like below, where 1a:2b:3c:01:01:01 is the MAC address (note that the MAC address must be unicast):

void setup()
{
PS4.begin("1a:2b:3c:01:01:01");
Serial.println("Ready.");
}

Controlling using ImSwitch

Please have a look here for more information about how to install ImSwitch and here for the UC2-related setup files including the UC2-REST serial interface.

- - + + \ No newline at end of file diff --git a/docs/Electronics/uc2e8/index.html b/docs/Electronics/uc2e8/index.html index 6619c0930..db23899b5 100644 --- a/docs/Electronics/uc2e8/index.html +++ b/docs/Electronics/uc2e8/index.html @@ -10,8 +10,8 @@ - - + +
@@ -19,7 +19,7 @@ 4.1. Go to Platformio Home and navigate to Devices 4.2 Copy the Device port (if connected) and insert that into the platformio.ini, e.g. upload_port = /dev/cu.SLAB_USBtoUART or COM3 for windows
  • Hit the PlatformIO upload button; The following task will be run: platformio run --target upload; The code is getting compiled and saved into ./.pio/build/ 5.1 The code will be uploaded. If everything goes right the terminal says: `Leaving... Hard resetting via RTS pin...`
  • open the PlatformIO serial monitor (remember to also change the port in the platform.io accordingly) and check the ESP32's output (eventually hit the reset button)
  • In case you have any problems: File an issue :-)
  • In order to test several commands, you can find a useful list of json files in this file: json_api_BD.txt

    V1: Source-code, Compiling and Binaries (Deprecated)

    The current version of the firmware can be found here: https://github.com/openUC2/UC2-REST/tree/master/ESP32

    Additional information on how to install and compile the board can be found in the README

    Precompiled binaries that can be installed through ImSwitch (more information coming soon) or the esptool.py can be found here https://github.com/openUC2/UC2-REST/tree/master/ESP32/build

    V1: Install necessary software for UC2 rest (flash and interact) (Deprecated)

    Here you learn how to install the necessary software (Arduino IDE, drivers, ESP-IDF, Arduino libraries) that are necessary for the system to be working. Everything is explained in the video below.

    Additional information about the UC2 electronics and UC2-REST are provided here: https://github.com/openUC2/UC2-REST

    Download and install the software:

    To simplify life, we host a dropbox folder containing all the necessary drivers and Software pieces for this workshop. It will run on a Windows 10 64 Bit system:

    List of relevant files

    for the UC2-REST

    • Arduino IDE: arduino-1.8.18-windows.exe
    • ESP32 USB driver: CH341SER.exe
    • UC2 Rest firmware: UC2-REST.zip

    Alternative GitHub links that provide you with the latest version of the software:

    Steps to install the software

    1. Download all relevant files from the Dropbox folder above
    2. Install the Arduino IDE (including all drivers if you are asked during the installation)
    3. Install the CH340 USB Serial driver https://learn.sparkfun.com/tutorials/how-to-install-ch340-drivers/all
    4. Extract BenesArduinoLibraries-master.zip to /User/$USER$/Documents/Arduino/libraries
    5. Open the Arduino IDE and add the ESP32 board configuration. For this you need to add the following URL to the settings tag: https://dl.espressif.com/dl/package_esp32_index.json, http://arduino.esp8266.com/stable/package_esp8266com_index.json. For additional information please have a look in this tutorial
    6. Once done, open the Board manager and add the ESP32 version 2.0.3
    7. Unzip the folder UC2-REST and open the file /ESP32/main/main.ino
    8. Select the board, the port and hit the compile and upload button
    9. IMPORTANT when setting up the build + upload, make sure you add this setting for the partition scheme (and potentially all others if not already set as default):

    The system accepts different hardware configurations (pins, devices, etc.). All of this is defined in the pindef_XXXX.h. Please have a look in the UC2-REST repository for additional information: https://github.com/openUC2/UC2-REST

    VIDEO Tutorial: Steps to install the software

    - - + + \ No newline at end of file diff --git a/docs/Electronics/uc2e9/index.html b/docs/Electronics/uc2e9/index.html index ddfbb5817..88182f2b3 100644 --- a/docs/Electronics/uc2e9/index.html +++ b/docs/Electronics/uc2e9/index.html @@ -10,13 +10,13 @@ - - + + - - + + \ No newline at end of file diff --git a/docs/ImSwitch/DahengCamera/index.html b/docs/ImSwitch/DahengCamera/index.html index f3f7adffe..1cfac3f35 100644 --- a/docs/ImSwitch/DahengCamera/index.html +++ b/docs/ImSwitch/DahengCamera/index.html @@ -10,14 +10,14 @@ - - + +

    Install driver for Daheng Camera

    Windows

    Have a look here: https://www.get-cameras.com/requestdownload and install the drivers / SDK (newer versions of ImSwitch ship the drivers).

    Linux

    ARM

    You can use the camera on the Raspberry Pi or Jetson Nano. For this you can do the following steps:

    cd ~
    cd Downloads
    wget https://dahengimaging.com/downloads/Galaxy_Linux-armhf_Gige-U3_32bits-64bits_1.5.2303.9202.zip
    cd Galaxy_Linux-armhf_Gige-U3_32bits-64bits_1.5.2303.9202
    chmod +x Galaxy_camera.run
    sudo ./Galaxy_camera.run
    # go through the questionnaire
    sudo reboot

    Install Python bindings

    cd ~/Downloads
    wget https://dahengimaging.com/downloads/Galaxy_Linux_Python_2.0.2106.9041.tar.gz
    tar -xvf Galaxy_Linux_Python_2.0.2106.9041.tar.gz
    cd ~/Downloads/Galaxy_Linux_Python_2.0.2106.9041/api
    # conda activate ****ENV
    pip install -e .
    cd ~/Downloads/Galaxy_Linux_Python_2.0.2106.9041/api
    python ~/Downloads/Galaxy_Linux_Python_2.0.2106.9041/sample/GxSingleCamMono/GxSingleCamMono.py

    The result will be:

    /home/uc2/Downloads/Galaxy_Linux_Python_2.0.2106.9041/sample/GxSingleCamMono/GxSingleCamMono.py:19: SyntaxWarning: "is" with a literal. Did you mean "=="?
    if dev_num is 0:

    -------------------------------------------------------------
    Sample to show how to acquire mono image continuously and show acquired image.
    -------------------------------------------------------------

    Initializing......

    Frame ID: 0 Height: 3036 Width: 4024

    Sample Script

    # version:1.0.1905.9051
    import gxipy as gx
    from PIL import Image


    def main():
    # print the demo information
    print("")
    print("-------------------------------------------------------------")
    print("Sample to show how to acquire mono image continuously and show acquired image.")
    print("-------------------------------------------------------------")
    print("")
    print("Initializing......")
    print("")

    # create a device manager
    device_manager = gx.DeviceManager()
    dev_num, dev_info_list = device_manager.update_device_list()
    if dev_num is 0:
    print("Number of enumerated devices is 0")
    return

    # open the first device
    cam = device_manager.open_device_by_index(1)

    # exit when the camera is a color camera
    if cam.PixelColorFilter.is_implemented() is True:
    print("This sample does not support color camera.")
    cam.close_device()
    return

    # set continuous acquisition
    cam.TriggerMode.set(gx.GxSwitchEntry.OFF)

    # set exposure
    cam.ExposureTime.set(10000)

    # set gain
    cam.Gain.set(10.0)

    # start data acquisition
    cam.stream_on()

    # acquire image: num is the image number
    num = 1
    for i in range(num):
    # get raw image
    raw_image = cam.data_stream[0].get_image()
    if raw_image is None:
    print("Getting image failed.")
    continue

    # create numpy array with data from raw image
    numpy_image = raw_image.get_numpy_array()
    if numpy_image is None:
    continue

    # print height, width, and frame ID of the acquisition image
    print("Frame ID: %d Height: %d Width: %d"
    % (raw_image.get_frame_id(), raw_image.get_height(), raw_image.get_width()))

    # stop data acquisition
    cam.stream_off()

    # close device
    cam.close_device()

    if __name__ == "__main__":
    main()
    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchClient/index.html b/docs/ImSwitch/ImSwitchClient/index.html index d3344cde8..d4cd1fd38 100644 --- a/docs/ImSwitch/ImSwitchClient/index.html +++ b/docs/ImSwitch/ImSwitchClient/index.html @@ -10,13 +10,13 @@ - - + +

    ImSwitchClient Documentation

    ImSwitchClient is a Python package designed to connect to the ImSwitch REST API, enabling remote control of ImSwitchUC2 functionalities directly from Jupyter Notebooks. This client facilitates easy integration with the ImSwitch ecosystem, offering programmable access to various features like laser control, stage manipulation, and image acquisition.

    PyPI Version

    Features

    Installation

    To install ImSwitchClient, use the following pip command:

    pip install imswitchclient

    Quick Start Example

    This example demonstrates basic usage of ImSwitchClient for moving a positioner and acquiring an image.

    import imswitchclient.ImSwitchClient as imc
    import numpy as np
    import matplotlib.pyplot as plt
    import time

    # Initialize the client
    client = imc.ImSwitchClient()

    # Retrieve the first positioner's name and current position
    positioner_names = client.positionersManager.getAllDeviceNames()
    positioner_name = positioner_names[0]
    currentPositions = client.positionersManager.getPositionerPositions()[positioner_name]
    initialPosition = (currentPositions["X"], currentPositions["Y"])

    # Define and move to a new position
    newPosition = (initialPosition[0] + 10, initialPosition[1] + 10)
    client.positionersManager.movePositioner(positioner_name, "X", newPosition[0], is_absolute=True, is_blocking=True)
    client.positionersManager.movePositioner(positioner_name, "Y", newPosition[1], is_absolute=True, is_blocking=True)

    # Acquire and display an image
    time.sleep(0.5) # Allow time for the move
    lastFrame = client.recordingManager.snapNumpyToFastAPI()
    plt.imshow(lastFrame)
    plt.show()

    # Return the positioner to its initial position
    client.positionersManager.movePositioner(positioner_name, "X", initialPosition[0], is_absolute=True, is_blocking=True)
    client.positionersManager.movePositioner(positioner_name, "Y", initialPosition[1], is_absolute=True, is_blocking=True)

    Contributing

    Contributions to ImSwitchClient are welcome! Please refer to the project's GitHub repository for contribution guidelines: https://github.com/openUC2/imswitchclient/.

    License

    ImSwitchClient is licensed under the MIT License. For more details, see the LICENSE file in the project repository.

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchConfig/index.html b/docs/ImSwitch/ImSwitchConfig/index.html index 590753b04..9f012d735 100644 --- a/docs/ImSwitch/ImSwitchConfig/index.html +++ b/docs/ImSwitch/ImSwitchConfig/index.html @@ -10,13 +10,13 @@ - - + +

    ImSwitchConfig

    ImSwitch Config File

    This is a sample uc2_hik_histo.json configuration file:

    {
    "positioners": {
    "ESP32Stage": {
    "managerName": "ESP32StageManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "isEnable": true,
    "enableauto": false,
    "stepsizeX": -0.3125,
    "stepsizeY": -0.3125,
    "stepsizeZ": 0.3125,
    "homeSpeedX": 15000,
    "homeSpeedY": 15000,
    "homeSpeedZ": 15000,
    "isDualaxis": true,
    "homeDirectionX": 1,
    "backlashXOld": 15,
    "backlashYOld": 40,
    "backlashX": 0,
    "backlashY": 0,
    "homeEndstoppolarityY": 0,
    "homeDirectionY": -1,
    "homeDirectionZ": 0,
    "homeXenabled": 1,
    "homeYenabled": 1,
    "homeZenabled": 0,
    "initialSpeed": {
    "X": 15000,
    "Y": 15000,
    "Z": 15000
    }
    },
    "axes": [
    "X",
    "Y",
    "Z"
    ],
    "forScanning": true,
    "forPositioning": true
    }
    },
    "rs232devices": {
    "ESP32": {
    "managerName": "ESP32Manager",
    "managerProperties": {
    "host_": "192.168.43.129",
    "serialport": "COM3"
    }
    }
    },
    "lasers": {
    "LED": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": 1
    },
    "wavelength": 635,
    "valueRangeMin": 0,
    "valueRangeMax": 1023
    }
    },
    "detectors": {
    "WidefieldCamera": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "HikCamManager",
    "managerProperties": {
    "isRGB": 1,
    "cameraListIndex": 0,
    "cameraEffPixelsize": 0.2257,
    "hikcam": {
    "exposure": 0,
    "gain": 0,
    "blacklevel": 100,
    "image_width": 1000,
    "image_height": 1000
    }
    },
    "forAcquisition": true,
    "forFocusLock": true
    },
    "Observer": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "OpenCVCamManager",
    "managerProperties": {
    "cameraListIndex": 1,
    "cameraListIndexWIN": 0,
    "isRGB":1,
    "opencvcam": {
    "exposure": 10
    }
    },
    "forAcquisition": true
    }
    },
    "autofocus": {
    "camera": "WidefieldCamera",
    "positioner": "ESP32Stage",
    "updateFreq": 10,
    "frameCropx": 780,
    "frameCropy": 400,
    "frameCropw": 500,
    "frameCroph": 100
    },
    "mct": {
    "monitorIdx": 2,
    "width": 1080,
    "height": 1920,
    "wavelength": 0,
    "pixelSize": 0,
    "angleMount": 0,
    "patternsDirWin": "C:\\Users\\wanghaoran\\Documents\\ImSwitchConfig\\imcontrol_slm\\488\\",
    "patternsDir": "/users/bene/ImSwitchConfig/imcontrol_sim/488"
    },
    "dpc": {
    "wavelength": 0.53,
    "pixelsize": 0.2,
    "NA": 0.3,
    "NAi": 0.3,
    "n": 1.0,
    "rotations": [
    0,
    180,
    90,
    270
    ]
    },
    "webrtc": {},
    "PixelCalibration": {},
    "focusLock": {
    "camera": "WidefieldCamera",
    "positioner": "ESP32StageManager",
    "updateFreq": 4,
    "frameCropx": 0,
    "frameCropy": 0,
    "frameCropw": 0,
    "frameCroph": 0
    },
    "LEDMatrixs": {
    "ESP32 LEDMatrix": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDMatrixManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "Nx": 4,
    "Ny": 4,
    "wavelength": 488,
    "valueRangeMin": 0,
    "valueRangeMax": 32768
    }
    }
    },
    "availableWidgets": [
    "Settings",
    "View",
    "Recording",
    "Image",
    "Laser",
    "Positioner",
    "Autofocus",
    "MCT",
    "UC2Config",
    "ImSwitchServer",
    "PixelCalibration",
    "HistoScan",
    "LEDMatrix",
    "Joystick",
    "Flatfield",
    "ROIScan"
    ],
    "nonAvailableWidgets": [
    "STORMRecon",
    "DPC",
    "Hypha",
    "FocusLock",
    "HistoScan",
    "FocusLock",
    "FOVLock"
    ]
    }

    Configuration File Documentation

    Overview

    This configuration file is designed to manage settings and properties of various components in a complex system, such as positioners, RS232 devices, lasers, detectors, autofocus settings, etc. It is structured in JSON format for ease of reading and editing.

    Sections

    1. Positioners

      • ESP32Stage
        • managerName: Specifies the manager responsible for handling this positioner, in this case, ESP32StageManager.
        • managerProperties: Contains detailed settings for the positioner, such as RS232 device identification, step sizes for different axes, home speeds, axis enable/disable settings, and other mechanical properties.
        • axes: Lists the axes controlled by this positioner (X, Y, Z).
        • forScanning & forPositioning: Boolean flags to indicate if the positioner is used for scanning and/or positioning.
    2. RS232 Devices

      • ESP32
        • managerName: The manager handling RS232 devices, here ESP32Manager.
        • managerProperties: Network and port settings for the RS232 device.
    3. Lasers

      • LED
        • Details for managing LED laser settings, including the manager name (ESP32LEDLaserManager), RS232 device reference, channel index, wavelength, and value range.
    4. Detectors

      • WidefieldCamera & Observer
        • Configuration for different camera detectors, including manager names (HikCamManager, OpenCVCamManager), properties like RGB support, camera indexes, pixel size, and acquisition settings.
    5. Autofocus

      • Configuration for autofocus feature, linking a camera with a positioner and setting parameters like update frequency and frame cropping dimensions.
    6. MCT (Multichannel Tissue)

      • Settings for monitor index, dimensions, wavelength, pixel size, angle mount, and directories for pattern files.
    7. DPC (Differential Phase Contrast)

      • Settings related to DPC imaging, including wavelength, pixel size, numerical aperture, refractive index, and rotation angles.
    8. WebRTC

      • An empty section possibly reserved for WebRTC configuration.
    9. Pixel Calibration

      • An empty section likely intended for pixel calibration settings.
    10. Focus Lock

      • Focus lock settings similar to autofocus but with its distinct configuration.
    11. LED Matrixes

      • ESP32 LEDMatrix
        • Configuration for LED matrixes, specifying manager details, RS232 device, dimensions, wavelength, and value range.
    12. Available Widgets

      • A list of widgets that are available in the system, indicating the features or components that can be controlled or monitored.
    13. Non-Available Widgets

      • A list of widgets that are not available, possibly indicating features not supported or deactivated in the current setup.

    Conclusion

    This configuration file is a comprehensive document that outlines the settings and parameters for various hardware and software components in a specialized system. It is critical for ensuring the correct operation of the equipment it is designed to control.

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchExperimental/index.html b/docs/ImSwitch/ImSwitchExperimental/index.html index a7caedbf1..883ebe714 100644 --- a/docs/ImSwitch/ImSwitchExperimental/index.html +++ b/docs/ImSwitch/ImSwitchExperimental/index.html @@ -10,15 +10,15 @@ - - + +

    ImSwitch Experimental Features Documentation

    Overview

    This document details the new experimental features for the microscopy control software, ImSwitch. These features include a headless version of the software, suitable for resource-constrained environments, and a Docker container setup for easy deployment and testing. The headless version allows operation on resource-limited devices, while the Docker container facilitates easy deployment and testing. Please provide feedback and report any issues encountered to help improve these experimental features.

    Headless Version in Google Colab

    We have developed a headless version of ImSwitch that operates without the need for the QT graphical interface. This version allows remote control and UI element access solely through the REST API or Jupyter Notebook. Please note that some functions are still under development, and this version is experimental.

    Use Cases

    This headless version is particularly useful on devices such as Raspberry Pis and Nvidia Jetsons, which may struggle with the resource demands of installing and running PyQT.

    Getting Started in Google Colab

    To try the headless version of ImSwitch in Google Colab, follow these steps:

    1. Install ImSwitch:

      !pip install https://github.com/openUC2/ImSwitch/archive/refs/heads/NOQT.zip #--no-deps ##--force-reinstall
    2. Access the public version in Google Colab: Google Colab Link

    3. Clone the repository and checkout the NOQT branch:

      %cd ~
      !git clone https://github.com/openUC2/ImSwitch
      !git pull
      %cd ./ImSwitch
      !git checkout NOQT
    4. Install the package:

      !pip install -e .
    5. Configure and run ImSwitch in headless mode:

      from google.colab.output import eval_js
      print(eval_js("google.colab.kernel.proxyPort(8002)"))
      from imswitch.__main__ import main
      import imswitch
      imswitch.IS_HEADLESS = True
      main(is_headless=True, default_config="example_virtual_microscope.json")
      input() # Prevent from closing the cell

    Docker Container

    A Docker container is available for ImSwitch, providing a convenient way to deploy and test the software.

    Docker Container Details

    Running the Docker Container

    1. Launch the Docker container:

      docker run -it --rm -p 9876:9876 -p 8001:8001 -p 2222:22 imswitch
    2. Access the GUI and REST API:

      • Open your browser and go to localhost:9876 to access the GUI.
      • Go to localhost:8001 to access the REST API.

    Notes

    • This Docker setup is primarily a demo version to freeze system dependencies.
    • The next step involves integrating actual hardware for complete functionality.
    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchInstall/index.html b/docs/ImSwitch/ImSwitchInstall/index.html index 352d81606..de65ee2dd 100644 --- a/docs/ImSwitch/ImSwitchInstall/index.html +++ b/docs/ImSwitch/ImSwitchInstall/index.html @@ -10,13 +10,13 @@ - - + +

    Install ImSwitch

    What will you learn?

    • How to install the main GUI software to control the Optics components
    • How to install the drivers

    Download the software

    Duration: 1

    Please go to our ImSwitch Repository and download the latest Build-artefact:

    GitHub Actions -> "bundle"

    Extract the ZIP folder

    Duration: 2

    Right click on the downloaded ZIP folder and select "extract all". This may take a while.

    Download and Install the drivers for the Camera/UC2 Electronics board

    Electronics Board (CH340)

    For the CH340 driver, please follow these instructions

    The driver is available here

    Download the Windows CH340 Driver
    Unzip the file
    Run the installer which you unzipped
    In the Device Manager when the CH340 is connected you will see a COM Port in the Tools > Serial Port menu, the COM number for your device may vary depending on your system.

    Electronics Board (CP210x)

    The driver for the CP210x is available here

    Download the Windows CP210x Driver
    Unzip the file
    Run the installer which you unzipped
    In the Device Manager when the CP210x is connected you will see a COM Port in the Tools > Serial Port menu, the COM number for your device may vary depending on your system.

    Daheng Imaging Cameras

    Download the Windows SDK USB2+USB3+GigE (including Directshow + Python) Galaxy V1.18.2208.9301 for the Daheng USB3 Cameras from the Get-Cameras Website

    Install the packages on your computer.

    Duration: 2

    Prepare ImSwitch

    1. Connect the Camera with your computer (optionally test its proper functioning using Daheng Galaxy Viewer(x64)) using the USB3 cable
    2. Connect the UC2 electronics with the computer (USB micro) and hook up the 12V power supply with the power connection
    3. Check if you can see the USB COM Port in your device manager:

    Duration: 2

    Install the UC2 ImSwitch Configurations

    In order to use the UC2 Hardware and the Daheng USB Camera, you need the UC2 config files. Please go to https://github.com/openUC2/ImSwitchConfig/tree/stable and download the Repository as a zip file following this link.

    Once it has been downloaded, unzip it to C:\Users\YOURUSERNAME\Documents\ImSwitchConfig

    It should look like this:

    Duration: 2

    Start ImSwitch

    1. Open a Windows Terminal by typing WIN+R, then type CMD and hit enter.

    1. In the Windows Terminal navigate to the folder where you downloaded the software - e.g. cd C:\Users\UC2\Downloads\imswitch-windows-latest\ImSwitch and hit enter
    2. start the executable BUT we need to add one command in advance: set SETUPTOOLS_USE_DISTUTILS=stdlib
    3. Type ImSwitch.exe and hit enter, the executable will open the ImSwitch GUI

    If everything has been configured correctly, the GUI should open and look like this. Additional information on its functionality can be found in the Read-The-Docs: https://imswitch.readthedocs.io/en/stable/

    If you have any additional questions or issues, please post them in the ISSUE section here.

    Explanatory Video on how to get started with ImSwitch

    Duration: 3

    https://www.youtube.com/watch?v=Om6GWZZ_0So

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchInstallUbuntu/index.html b/docs/ImSwitch/ImSwitchInstallUbuntu/index.html index c6fd1e2f6..5193a7db4 100644 --- a/docs/ImSwitch/ImSwitchInstallUbuntu/index.html +++ b/docs/ImSwitch/ImSwitchInstallUbuntu/index.html @@ -10,13 +10,13 @@ - - + +

    ImSwitchInstallUbuntu

    ImSwitch Installation Ubuntu

    Step 1: Install Visual Studio Code (VS Code)

    1. Open a web browser and navigate to the VS Code download page.
    2. Download the Debian package for your 64-bit system.
    3. Once downloaded, open a terminal window and navigate to the directory where the .deb file is located.
    4. Run the following command to install VS Code:
      sudo dpkg -i <filename>.deb
      sudo apt-get install -f

    Step 2: Install Miniconda

    1. Open a terminal window and run the following command to download Miniconda:
      wget https://repo.anaconda.com/miniconda/Miniconda3-py310_23.5.2-0-Linux-x86_64.sh
    2. Make the script executable and run it:
      bash Miniconda3-py310_23.5.2-0-Linux-x86_64.sh
    3. Follow the on-screen instructions to complete the installation.
    4. Create a new environment named imswitch with Python 3.10:
      conda create -n imswitch python=3.10 -y

    Step 3: Clone Necessary Repositories

    1. Navigate to the Downloads directory:
      cd ~/Downloads
    2. Clone the required repositories:
      git clone https://github.com/openUC2/UC2-REST
      git clone https://github.com/openUC2/ImSwitch
      git clone https://gitlab.com/bionanoimaging/nanoimagingpack
    1. Activate the imswitch environment:
      conda activate imswitch
    2. Navigate to the ImSwitch directory and install it:
      cd ~/Downloads/ImSwitch
      pip install -e .
    3. Repeat for UC2-REST and nanoimagingpack:
      cd ~/Downloads/UC2-REST
      pip install -e .
      cd ~/Downloads/nanoimagingpack # Correcting typo from original logs
      pip install -e .

    Step 5: Install Camera Drivers

    1. Clone the camera drivers:
      cd ~/Downloads
      git clone https://github.com/hongquanli/octopi-research/
    2. Navigate to the camera drivers directory and run the installation script:
      cd octopi-research/software/drivers\ and\ libraries/daheng\ camera/Galaxy_Linux-x86_Gige-U3_32bits-64bits_1.2.1911.9122/
      ./Galaxy_camera.run

    Step 6: Clone ImSwitch Configuration and Set Permissions

    1. Navigate to the Documents directory:
      cd ~/Documents
    2. Clone the ImSwitch configuration:
      git clone https://github.com/openUC2/ImSwitchConfig
    3. Change the ownership of the device:
      sudo chown pi:pi /dev/ttyUSB0

    Congratulations! You have successfully installed ImSwitch and related dependencies.

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchInstallWindows/index.html b/docs/ImSwitch/ImSwitchInstallWindows/index.html index 0745fb90b..6d41928e6 100644 --- a/docs/ImSwitch/ImSwitchInstallWindows/index.html +++ b/docs/ImSwitch/ImSwitchInstallWindows/index.html @@ -10,14 +10,14 @@ - - + +

    ImSwitchInstallWindows

    ImSwitch Installation on Windows

    Step 1: Install Visual Studio Code (VS Code)

    1. Open a web browser and go to the VS Code download page.
    2. Download the Windows Installer.
    3. Once the download is complete, locate the installer and double-click to run it.
    4. Follow the on-screen instructions to complete the installation.

    Step 2: Install Miniconda

    1. Open a web browser and navigate to the Miniconda download page.
    2. Download the Windows installer for the 64-bit version of Miniconda.
    3. Run the installer by double-clicking the downloaded file.
    4. Follow the installer prompts to install Miniconda to a directory of your choice (e.g., C:\Miniconda3).
    5. During installation, ensure that the option to "Add Anaconda to my PATH environment variable" is selected.

    Step 3: Clone Necessary Repositories

    1. Ensure you have installed Git for Windows
    2. Open the Command Prompt:
      • Press Win + R, type cmd, and press Enter.
    3. Navigate to your preferred directory where you want to clone the repositories (e.g., C:\Users\<YourUsername>\Downloads):
      cd C:\Users\<YourUsername>\Downloads
    4. Clone the required repositories:
      git clone https://github.com/openUC2/UC2-REST
      git clone https://github.com/openUC2/ImSwitch
      git clone https://gitlab.com/bionanoimaging/nanoimagingpack
    1. Open the Command Prompt.
    2. Create a new Conda environment named imswitch with Python 3.10:
      conda create -n imswitch python=3.10
    3. Activate the imswitch environment:
      conda activate imswitch
    4. Navigate to the ImSwitch directory and install it: (The e just states to install the packages in editable mode)
      cd C:\Users\<YourUsername>\Downloads\ImSwitch
      pip install -e .
    5. Repeat for UC2-REST and nanoimagingpack:
      cd C:\Users\<YourUsername>\Downloads\UC2-REST
      pip install -e .
      cd C:\Users\<YourUsername>\Downloads\nanoimagingpack
      pip install -e .

    Step 5: Install Camera Drivers for Daheng Cameras

    1. Download the galaxy camera sdk for windows here https://www.get-cameras.com/requestdownload
    2. Double-click on the installation executable file (Galaxy_camera.exe) to run it.
    3. Follow the on-screen instructions to complete the installation.

    Step 6: Clone ImSwitch Configuration and Set Permissions

    1. Navigate to the Documents directory using the Command Prompt:
      cd C:\Users\<YourUsername>\Documents
    2. Clone the ImSwitch configuration:
      git clone https://github.com/openUC2/ImSwitchConfig

    Step 7: Start

    conda activate imswitch
    imswitch

    Problems and Solutions

    Very likely, there will be a PyQt issue.

    Try:

    pip install PyQt5 --force-reinstall

    Congratulations! You have successfully installed ImSwitch and related dependencies on Windows.

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchInstaller/index.html b/docs/ImSwitch/ImSwitchInstaller/index.html index 6341a21df..4360ba75a 100644 --- a/docs/ImSwitch/ImSwitchInstaller/index.html +++ b/docs/ImSwitch/ImSwitchInstaller/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -20,7 +20,7 @@ :::

    ImSwitch Installer

    Introduction

    The ImSwitch Installer is an Electron-based application designed to streamline the installation process of the ImSwitch environment. It automates the download and setup of a Mamba environment along with all necessary dependencies from GitHub via pip. This tool simplifies the installation process, reducing it to a few clicks and eliminating the need for executing multiple complex commands.

    Installation Process

    The following YouTube video shows you how to install ImSwitch using the installer. IMAGE ALT TEXT HERE

    Features

    • Easy Installation: Downloads and installs all necessary dependencies automatically.
    • Cross-Platform Support: Available for ARM64 and Intel-based Mac, Windows, and Linux systems.
    • Simple Uninstallation: Remove by deleting the ImSwitch folder.
    • Security: Instructions provided for bypassing system security warnings due to unsigned code.
    • Update Mechanism: Integrated update functionality to easily fetch the latest versions.

    Requirements

    • Disk Space: Minimum of 5GB.
    • Memory: 8GB RAM.
    • Processor: Intel i5 or Apple Silicon.

    Installation Guide

    1. Download the Installer: Choose the appropriate installer (ARM64 for Mac, Windows) from the releases section.
    2. Run the Installer: Double-click the downloaded file. Ignore any security warnings as the installer is not yet signed.
      • For macOS, follow Apple's guide for running unsigned code.
      • On Windows, grant permission to run the application.
    3. Installation Process: The installer will set up a Python environment using Mamba in /User/yourname/ImSwitch and install all dependencies. This process may take between 10 to 30 minutes depending on your internet connection.
    4. Starting ImSwitch: Once installed, launch ImSwitch by executing start imswitch. https://camo.githubusercontent.com/d12b826a278a7dcb877c8524a22cc3408b82883a27edc9464f865b7d5403e6ef/68747470733a2f2f69332e7974696d672e636f6d2f76692f4e345031734832453952552f6d617872657364656661756c742e6a7067Installation Screen 1Installation Screen 2

    Updating ImSwitch

    To update, click the update button within the application. This will download the latest ZIP from GitHub, unzip it, and execute pip install -e . within the Mamba base environment.

    Driver Installation

    Drivers for Daheng and HIK Vision cameras can be found under the "Driver Installation" link provided within the installer.

    Upcoming Features

    • Flashing the latest version of the UC2 firmware.
    • Building the ImSwitch Hardware Configuration using a drag-and-drop GUI.

    Install from Source

    For those interested in building from source:

    # Clone the repository
    git clone https://github.com/openuc2/imswitchinstaller.git

    # Install dependencies (skip if you already have yarn)
    npm install

    # Run the Electron app
    npm start

    Debugging

    Python Environment Locations

    • Windows Installation Path: C:\Users\UCadmin2\ImSwitch\miniforge\condabin
    • Python Executable: C:\\Users\\UCadmin2\\ImSwitch\\miniforge\\python.exe
    • ImSwitch Package Location: C:\\Users\\UCadmin2\\ImSwitch\\miniforge\\lib\\site-packages\\imswitch\\__init__.py'
    • Conda Environment /Users/ImSwitch/miniforge/condabin/mamba install devbio-napari -c conda-forge

    Disclaimer

    This installer is based on the BellJar project. Thanks a lot for making it open-source! :)

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchInstallerConda/index.html b/docs/ImSwitch/ImSwitchInstallerConda/index.html index 126690777..6a231c39e 100644 --- a/docs/ImSwitch/ImSwitchInstallerConda/index.html +++ b/docs/ImSwitch/ImSwitchInstallerConda/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -22,7 +22,7 @@

  • Choose a path to install the conda environment to (Hint: path should not exceed 40 digits)

    1. Proceed

    1. Wait until the packages are installed

    1. A command prompt will appear and ask you for granting admin rights; Here a python script tries to download and install the silabs USB UART drivers for the ESP32

    2. The installer will tell you whether the installation process was successful

    1. In the next steps, all necessary packages in the environment for ImSwitch will be downloaded and installed

    1. The installer informs you once it's done

    1. Once everything has been installed, the installer tells you it's done

    1. Exit the installer by hitting finish

    1. On the desktop a new icon has been created to start the ImSwitch software. Double click and wait until the windows shows up

    Troubleshooting

    The conda installer installs your environment in the location that you selected previously. To find out where, you can open a command-line window by hitting the keys WIN+R, typing "cmd", and pressing Enter. Then enter

    conda env list

    The name imswitchopenuc2 should appear. You can activate this Python environment by typing

    conda activate imswitchopenuc2

    If this works successfully, you can start imswitch by typing

    imswitch

    Disclaimer

    This is still at a very early stage and may have errors — expect errors. Feel free to file any issues in our repository or write us a mail. :)

    - - + + \ No newline at end of file diff --git a/docs/ImSwitch/ImSwitchUpdate/index.html b/docs/ImSwitch/ImSwitchUpdate/index.html index ca95e8bd9..8bcae88aa 100644 --- a/docs/ImSwitch/ImSwitchUpdate/index.html +++ b/docs/ImSwitch/ImSwitchUpdate/index.html @@ -10,13 +10,13 @@ - - + +

    ImSwitchUpdate

    Updated openUC2 ImSwitch

    In this guide, we'll walk you through the process of updating ImSwitch after you've installed it using pip. The update consists of three main steps:

    1. Updating the ImSwitch UC2 version
    2. Updating the UC2-REST
    3. Updating the UC2-ESP32 firmware

    1. Updating the ImSwitch UC2 Version

    Assumption: You have previously cloned the ImSwitch repository using git.

    1. Open your terminal.

    2. Activate the ImSwitch environment:

      conda activate imswitch
    3. Navigate to the directory where you cloned ImSwitch:

      cd <DIRECTORY/WHERE/YOU/DOWNLOADED/IMSWITCH>
    4. Pull the latest version from the repository and install:

      git pull https://github.com/openUC2/ImSwitch/
      pip install -e .

    2. Updating the UC2-REST to Interface the UC2 Electronics

    Assumption: You have previously cloned the UC2-REST repository using git.

    1. In the terminal, navigate to the directory where you cloned UC2-REST:

      cd <DIRECTORY/WHERE/YOU/DOWNLOADED/UC2-REST>
    2. Pull the latest version from the repository and install:

      git pull https://github.com/openUC2/UC2-REST/
      pip install -e .

    3. Updating the UC2-ESP32 Firmware

    1. Visit the UC2 Firmware Page.
    2. Select the board you're using. If you're uncertain about this, feel free to reach out via email.
    3. Click on the "Connect" button.
    4. From the browser-provided list, select the COM port.
    5. Click on "Flash Firmware".
    6. Wait for the installation process to complete.
    7. Test the firmware on the UC2 Web Serial Test Page.
    8. Close the browser window to release the serial port.

    Finally, you can start ImSwitch:

    python -m imswitch
    - - + + \ No newline at end of file diff --git a/docs/Investigator/FlowStopper/index.html b/docs/Investigator/FlowStopper/index.html index 5e608c7db..6f72df23f 100644 --- a/docs/Investigator/FlowStopper/index.html +++ b/docs/Investigator/FlowStopper/index.html @@ -10,13 +10,13 @@ - - + +

    README

    Setup Wifi Access Point on the Raspi

    from: https://cdn-learn.adafruit.com/downloads/pdf/setting-up-a-raspberry-pi-as-a-wifi-access-point.pdf

    sudo apt update
    sudo apt -y upgrade # takes long
    sudo apt install -y hostapd dnsmasq
    sudo systemctl unmask hostapd
    sudo systemctl enable hostapd
    sudo DEBIAN_FRONTEND=noninteractive apt install -y netfilter-persistent iptables-persistent
    sudo reboot
    sudo nano /etc/dhcpcd.conf
    -------
    interface wlan0
    static ip_address=192.168.4.1/24
    nohook wpa_supplicant
    sudo nano /etc/sysctl.d/routed-ap.conf
    -------
    sudo nano /etc/dnsmasq.conf
    -------
    interface=wlan0 # Listening interface
    dhcp-range=192.168.4.2,192.168.4.20,255.255.255.0,24h
    # Pool of IP addresses served via DHCP
    domain=wlan # Local wireless DNS domain
    address=/gw.wlan/192.168.4.1 # Alias for this router
    - - + + \ No newline at end of file diff --git a/docs/Investigator/Lightsheet/LightSheet Sample/index.html b/docs/Investigator/Lightsheet/LightSheet Sample/index.html index 75f4ddadd..ca8e65485 100644 --- a/docs/Investigator/Lightsheet/LightSheet Sample/index.html +++ b/docs/Investigator/Lightsheet/LightSheet Sample/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -32,7 +32,7 @@ The openUC2 light-sheet microscope features motorized axes for enhanced control and precision:
    • The Z-axis adjusts the objective lens focus relative to the light-sheet plane.
    • The X axis moves the sample in the vertical direction with respect to the ground surface.
    • The Y axis shifts the sample parallel to the light-sheet plane.
    • The A axis moves the sample along the light-sheet plane, towards or away from the objective lens.
    • Each step corresponds to approximately 300nm in physical units, enabling fine-tuned movement and positioning.

    We appreciate your engagement with the openUC2 light-sheet microscope and hope that these technical details enhance your understanding of the setup and its capabilities. Should you have any inquiries or require further assistance, please don't hesitate to reach out.

    Protocol to align the light-sheet w.r.t. the focus plane

    Alignment Protocol for Light-Sheet Microscope Focus Plane

    Efficient alignment of the light-sheet with the microscope objective lens's focus plane is crucial for optimal imaging results. This protocol outlines the steps to achieve precise alignment using fluorescent markers and manipulation of the kinematic mirror.

    Alignment Steps:

    1. Fluorescent Marker Setup:

      • Begin by ensuring that the light-sheet is coplanar with the microscope's objective lens field of view.
      • Use a fluorescent pen marker to label the embedding media, effectively visualizing the light-sheet.

    2. Activating the Laser:

    1. Visualizing the Light-Sheet:

      • With the laser activated, you should observe the light-sheet within the water chamber. Refer to the provided image for a reference.
    2. Kinematic Mirror Adjustment:

      • The three screws on the kinematic mirror in the right corner control the orientation of the light-sheet in 3D space.
      • Familiarize yourself with the degrees of freedom associated with these screws.

    1. Fundamental Considerations:
      • The cylindrical lens focuses the primary light-sheet in the backfocal plane of the illumination objective (4x, 0.1 NA).
      • Rotating the objective lens adjusts the orientation of the light-sheet.
      • The square orientation of the cylindrical lens ensures proper alignment with the detection objective lens.
      • The primary light-sheet exits the cylindrical lens at the center.
      • The kinematic mirror manipulates the light-sheet's position in the x and y directions, as well as introducing an offset.
      • Correct mirror alignment is crucial, placing it precisely at the diagonal center of the cube.
      • This central placement ensures that the primary light-sheet enters the objective lens's backfocal plane (BFP) at the center.
      • Such alignment results in the secondary illuminating light-sheet being parallel to the detection lens's focus plane.
      • Observe the effects of rotating the screws and adjust accordingly.

    1. Fluorescent Solution Application:
      • Utilize a syringe for convenient application of the fluorescent solution.

    7. Sample Cube Handling:

    • The sample cube is magnetically held, facilitating easy removal for cleaning.

    • Take care as the sample cube's coverslips are relatively thin and can break.

    Achieving precise alignment between the light-sheet and the objective lens's focus plane is critical for obtaining accurate imaging results. This protocol provides a systematic approach to optimizing your light-sheet microscope setup. For further assistance or questions, feel free to reach out to our community and support channels. Your engagement contributes to the ongoing refinement of the openUC2 light-sheet microscope system.

    Finding the focus (waist) of the light-sheet

    To effectively align the light-sheet in your setup, it's crucial to follow these two key steps:

    Step 1: Centering the Sheet within the Field of View (FOV)

    1. Begin by ensuring that the waist of the light-sheet is positioned at the center of the microscope's field of view (FOV).
    2. To achieve this, the cylindrical lens needs to be temporarily removed. Carefully release the lower puzzle pieces to detach the cylindrical lens cube.

    1. With the cylindrical lens removed, a collimated beam should enter the back focal plane (BFP) of the illuminating objective lens.
    2. Adjust the kinematic mirror to guide the round beam, approximately 10mm in diameter, into the center of the BFP of the illuminating objective lens. This alignment should be parallel to the optical axis.

    Step 2: Achieving Focus with the Detection Objective Lens

    1. Activate the camera, such as using Galaxy Viewer software that comes with the camera drivers, to observe the light-sheet's focus.
    2. The fluorescently labeled region should now exhibit a focused beam, perceptible to the naked eye.
    3. Initiate axial movement of the objective lens (Axis Z) using the online control website. You'll notice an increase in intensity at either the positive or negative direction until the light-sheet focus becomes visible within the field of view.

    1. To optimize focus, make fine adjustments to the kinematic mirror to direct the light-sheet beam if it's positioned too high or too low.
    2. It's common for the light-sheet's focus not to align precisely with the center of the FOV. In this case, carefully adjust the position of the illuminating objective lens along the cube axis to relocate the focus positions.
    3. Once you're content with the alignment, deactivate the laser and reinsert the cylindrical lens.
    4. Notably, this step doesn't need to be repeated each time the light-sheet is activated. The position of the cylindrical lens is relatively stable and doesn't require frequent recalibration.

    Following these steps meticulously will ensure that the light-sheet is accurately aligned both within the FOV's center and in-focus with the detection objective lens. This alignment process is essential for obtaining reliable and high-quality imaging results with the openUC2 light-sheet microscope.

    Once the cylindrical lens is back in, you can readjust the light-sheet w.r.t. the focus plane of the objective lens, since there may be a slight variation after reassembly.

    Brightfield imaging

    In case you want to image the sample in transmission mode, turn on the Neopixel LED that is connected to the sample cube and optionally remove the fluorescent filter by pulling it up and store it somewhere safe (dust- and scratch-free!).

    Using the Fully Assembled Light-Sheet Microscope for Sample Imaging

    Now that all components are meticulously aligned, the openUC2 light-sheet microscope is primed for sample imaging. Follow these steps to prepare and capture your fluorescent sample:

    1. Sample Preparation:

      • Begin by preparing your fluorescent sample according to the specified protocols.
      • Carefully follow the steps outlined in the dedicated sample preparation section within this document.
    2. Assembling the Sample Holder:

      • Loosen the nut that secures the syringe and insert the syringe into the sample holder.
      • Gradually lower the syringe so that the tip of the sample barely touches the light-sheet within the sample plane.

    1. Squeezing out the Agarose:
      • Squeeze out the agarose gently from the syringe while observing the sample, starting with brightfield imaging.
      • Monitor the camera's image stream to ensure the sample becomes visible within the field of view.

    1. Observing Brightfield Image:
      • If the sample isn't immediately visible, confirm its positioning within the sample cube and make minor adjustments in XYZ to bring it into view on the camera screen.
      • Once visible in brightfield, deactivate the LED light source.

    1. Switching to Laser Illumination:

      • Turn on the laser source, and initially, remove the fluorescent filter.
      • Adjust the imaging settings to enhance contrast and visibility, increasing intensity, exposure time, and/or camera gain until you obtain a clear, well-exposed image with minimal noise.
    2. Fine-tuning Laser Position:

      • Using bright scattering as a guide, locate the laser's position while ensuring you have reinserted the fluorescent filter.
      • Adjust the intensity as needed.
    3. Sample Positioning:

      • Manipulate the sample's position in XYZ space to center it on a region of interest.
    4. ImSwitch Scan and Reconstruction:

      • Utilize ImSwitch software's scan and reconstruction plugin to perform scans of your sample.
      • The specific scan and reconstruction process details are provided in the ImSwitch documentation.

    This completes the procedure for imaging your fluorescent sample using the fully assembled openUC2 light-sheet microscope. With careful preparation and precise adjustments, you can capture high-quality volumetric images that offer valuable insights into the structure and behavior of your sample. Your engagement with the microscope's capabilities contributes to ongoing advancements in microscopic research and exploration.

    ImSwitch data acquisition and Reconstruction

    We assume the system is running and you were able to install ImSwitch on your computer. The configuration JSONfile that describes the light-sheet system can be found further down this document. A tutorial on how to install our ImSwitch Version (SRC: https://github.com/openUC2/ImSwitch/) can be either found in the imSwitch repository or in the ImSwitch section in this wiki.

    Mount the sample on a metal tip

    Glue the sample on an M5 set screw using super glue or blutek (non-safe, sample can fall off). Insects offer a great level of fluorescent signal due to autofluorescence and act as nice training samples that can simply hang down using this method

    Sample preparation á la agarose-in-syringe method

    SRC

    Sample Preparation Protocol for openUC2 Light-Sheet Microscope Imaging: Fluorescently Labeled Zebrafish

    This simplified protocol outlines the steps to prepare a fluorescently labeled zebrafish sample for imaging using the openUC2 light-sheet microscope. This method involves embedding the sample in an agarose cylinder for stable imaging in an aqueous environment. the "aquarium" or water-filled sample chamber is used to do refractive index matching as the sample would scatter too much light otherwise.

    Materials Required:

    • 1.5% Agar
    • Glass capillary
    • Zebrafish embryo (some other volumetric, mostly clear sample that can be excited at 488nm)
    • Sample medium
    • Falcon tube or small beaker
    • syringe or FEP tube (optional, for increased stability)

    Procedure:

    Mounting in Free-Hanging Agarose Cylinder:

    1. Take the syringe and cut away the tip
    2. Melt 1.5% agar at 70ºC and maintain it at 37ºC.
    3. Insert the plunger into the syringe capillary, ensuring the white end barely protrudes and suck in enough agarose
    4. Gently place the zebrafish embryo into the already solidified agarose, minimizing the water content.
    5. Pull the plunger to draw up about 3cm (1 inch) of melted agarose.
    6. Carefully position the sample close to the capillary's end.
    7. Allow the agarose to set for 1-2 minutes.
    8. When ready to image, gently push the plunger down to extrude the agarose cylinder with the sample, placing it just outside the capillary for imaging.

    Further tweaks for the system

    These steps are not necessary, but help you to customize the microscope to better match your sample configuration.

    Remove the xyz stage from the top

    In case you want to do maintenance on the microscope, the xyz stage can easily be removed by releasing the M3x55mm screws from the bottom part. Therefore, remove the puzzle piece that has been mounted below the objective lens and release the 3 screws that mount the stage plate to the upper part of the microscope. You can now release the stage. In order to mount it back on, do the reverse process.

    Swap the sample mounting plate

    In principle the XYZ stage can mount any sample geometry. We wanted to start with something and adapted the common syringe mount. Only two screws from below have to be released in order to swap the sample mount plate:

    This part can be customized to adapt e.g. conventional sample slides

    ImSwitch configuration for the light-sheet

    {
    "positioners": {
    "ESP32Stage": {
    "managerName": "ESP32StageManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "enableauto": 0,
    "isEnable": 1
    },
    "axes": [
    "X",
    "Y",
    "Z",
    "A"
    ],
    "forScanning": true,
    "forPositioning": true
    }
    },
    "rs232devices": {
    "ESP32": {
    "managerName": "ESP32Manager",
    "managerProperties": {
    "host_": "192.168.43.129",
    "serialport_": "COM3",
    "serialport": "/dev/cu.usbserial-A50285BI"
    }
    }
    },
    "lasers": {
    "488 Laser": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index":1,
    "filter_change": false,
    "laser_despeckle_period": 10,
    "laser_despeckle_amplitude": 0
    },
    "wavelength": 488,
    "valueRangeMin": 0,
    "valueRangeMax": 1024
    },
    "LED Matrix": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": "LED",
    "filter_change": false,
    "filter_axis": 3,
    "filter_position": 32000,
    "filter_position_init": -0
    },
    "wavelength": 635,
    "valueRangeMin": 0,
    "valueRangeMax": 255
    }
    },
    "detectors": {
    "WidefieldCamera": {
    "ExtPackage": "imswitch_det_webcam",
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "GXPIPYManager",
    "managerProperties": {
    "cameraListIndex": 1,
    "gxipycam": {
    "exposure": 20,
    "gain": 0,
    "blacklevel": 10,
    "image_width": 1000,
    "image_height": 1000
    }
    },
    "forAcquisition": true,
    "forFocusLock": true
    }
    },
    "rois": {
    "Full chip": {
    "x": 600,
    "y": 600,
    "w": 1200,
    "h": 1200
    }
    },
    "LEDMatrixs": {
    "ESP32 LEDMatrix": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDMatrixManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "Nx": 4,
    "Ny": 4
    },
    "wavelength": 488,
    "valueRangeMin": 0,
    "valueRangeMax": 32768
    }
    },
    "autofocus": {
    "camera": "WidefieldCamera",
    "positioner": "ESP32Stage",
    "updateFreq": 10,
    "frameCropx": 780,
    "frameCropy": 400,
    "frameCropw": 500,
    "frameCroph": 100
    },
    "uc2Config": {
    "defaultConfig": "pindefWemos.json",
    "defaultConfig2": "pindefUC2Standalon2.json",
    "defaultConfig1": "pindefUC2Standalon.json"
    },
    "mct": {
    "monitorIdx": 2,
    "width": 1080,
    "height": 1920,
    "wavelength": 0,
    "pixelSize": 0,
    "angleMount": 0,
    "patternsDirWin": "C:\\Users\\wanghaoran\\Documents\\ImSwitchConfig\\imcontrol_slm\\488\\",
    "patternsDir": "/users/bene/ImSwitchConfig/imcontrol_sim/488"
    },
    "dpc": {
    "wavelength": 0.53,
    "pixelsize": 0.2,
    "NA":0.3,
    "NAi": 0.3,
    "n": 1.0,
    "rotations": [0, 180, 90, 270]
    },
    "webrtc":{},
    "PixelCalibration": {},
    "availableWidgets": [
    "Settings",
    "Positioner",
    "View",
    "Recording",
    "Image",
    "Laser",
    "UC2Config",
    "Joystick",
    "Lightsheet",
    "LEDMatrix"
    ],
    "nonAvailableWidgets":[
    "STORMRecon",
    "LEDMatrix",
    "MCT",

    "ImSwitchServer",
    "PixelCalibration",
    "Hypha",
    "FocusLock",
    "HistoScan",

    "FocusLock"]
    }
    - - + + \ No newline at end of file diff --git a/docs/Investigator/Lightsheet/LightSheet/index.html b/docs/Investigator/Lightsheet/LightSheet/index.html index 7a26e1940..61d610024 100644 --- a/docs/Investigator/Lightsheet/LightSheet/index.html +++ b/docs/Investigator/Lightsheet/LightSheet/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -19,7 +19,7 @@ Z-stage for the objective lens

    Almost fully assembled UC2 light-sheet microscope

    Step 2: Light-Sheet Generation and Sample Preparation

    The fiber-coupled laser emits light at a wavelength of 488 nanometers, which is ideal for exciting fluorescent molecules commonly used in biological imaging, such as green fluorescent protein (GFP). The collimated laser beam passes through a cylindrical lens, creating a one-dimensional focus with a width of approximately 10 mm.

    The kinematic mirror allows precise control of the laser beam position, ensuring proper alignment. The lens further shapes the laser beam into an optical sheet, which is then directed into the sample plane by the four-fold objective lens.

    The sample, such as a zebrafish embryo, is held in a small aquarium filled with water. The sample is positioned such that the light sheet intersects it, and fluorescence signals are emitted only where the light sheet illuminates.

    Step 3: Image Acquisition

    Using the XYZ stage, move the sample in the focal plane of the ten-fold objective lens. The camera will capture images as the sample is moved, allowing you to create a three-dimensional stack of the object. The long working distance of the objective lens allows sufficient space between the lens and the sample, reducing the potential for photodamage and phototoxicity.

    Benefits of Light-Sheet Microscopy

    Light-sheet microscopy offers several advantages for imaging biological samples:

    • Optical sectioning: The light-sheet illuminates only the focal plane, minimizing background noise and out-of-focus signals.
    • Reduced phototoxicity: With the sample illuminated only in the focal plane, light-sheet microscopy reduces photodamage and photobleaching, allowing long-term imaging of live samples.
    • High-speed imaging: Light-sheet microscopy enables rapid volumetric imaging, capturing dynamic processes in real-time.
    • High resolution: The combination of optical sectioning and minimal scattering allows for high-resolution imaging, revealing fine cellular structures.

    Bill-of-Material

    This is a list of components that are used in the latest version of the openUC2 light-sheet microscope. This is subject to changes. If you are interested to build one of these devices and need a kit, please, don't hesitate to contact us via Mail, Github or other channels :) Find more information on www.openuc2.com

    CategoryAmountPartShopPrice (€)CommentQuantityURL/SourceAlternative
    External Parts1Cylindrical lens, comarThorlabs1501Link
    1Camera, monochrome, CMOSDaheng3501Link
    1Focusing stage, micrometer, motorized (NEMA12)China1001Haoran
    110x objective, NA0.3, long-working distanceUSA2501Link
    1XYZ stage, AliExpress, micrometerChina2501LinkLink
    3Motor for stageChina803LinkLink
    1Tube lensChina2001Link
    1Fiber laserChina2001HaoranLink
    1MirrorPGI51Link
    14x objective lens finiteChina101Haoran
    1Fiber CollimatorChina1001Haoran
    14BaseplatesopenUC2314
    8CubesopenUC2510
    1Solid baseplate (aluminium)openUC2-1
    1Excitation filter (Thorlabs)Thorlabs1201Link
    Inserts1Fiber Collimator MountopenUC251
    1Cylindrical Lens MountopenUC251
    145° Mirror Mount (kinematic)openUC2251
    1RMS Lens MountopenUC251
    1Sample mount (printed)openUC2301
    1Base for XYZ StageopenUC221
    1Sample mount for XYZ StageopenUC2151
    1Holder for Z-stage motorizedopenuc2301
    1Holder for TubelensopenUC2101
    1Holder for UC2 ElectronicsopenUC2301
    Electronics1Electronics, Powersupply, Stepper driveropenUC21001
    2USB cables (camera, micro)Germany602
    1Playstation ControllerGermany501
    1Box + Foam insertopenUC21001Link
    Labour & Shipping-Labour + Shipping-5001
    TOTAL---2790-

    The 3D printing files can be found here

    Conclusion

    Congratulations! You have successfully built a light-sheet microscope using the UC2 modular toolbox. This powerful technique allows you to acquire high-resolution three-dimensional images of samples like zebrafishes. With the ability to perform optical sectioning and minimal phototoxicity, light-sheet microscopy is a valuable tool for studying biological structures in 3D. You can now explore the fascinating world of 3D biological imaging and discover new insights into the complexities of life at the microscopic level. Happy imaging!

    - - + + \ No newline at end of file diff --git a/docs/Investigator/Lightsheet/LightSheetOld/index.html b/docs/Investigator/Lightsheet/LightSheetOld/index.html index 6d9f81011..88f50fc80 100644 --- a/docs/Investigator/Lightsheet/LightSheetOld/index.html +++ b/docs/Investigator/Lightsheet/LightSheetOld/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -34,7 +34,7 @@

    5. Use of filters
    • When using a correct filter between the Z-stage and the camera, it's possible to observe a fluorescent image of the sample.
    • Without filters you capture only the scattering image.

    Imaging with the light sheet microscope

    • The focus of the detection path can be finely adjusted using the Z-stage motor (GUI - Z).
    • Z-series can be acquired by moving the sample (GUI - X) through the focused light sheet plane - Move the sample-stage in both directions, using the lens tissue as a sample, to observe how the camera image changes.
    • To acquire an image: Choose "Start experiment" on the right side of the screen, click "Custom" on the top right side and then "Snap" on the bottom right side.
    • To acquire a z-stack use the tomographic mode:

    Results

    What can you see with the simplest possible light sheet setup:

    The result could look like this:

    Zebra fish embryo

    Participate!

    Do you want to show your own results? Do you have ideas for improvements? Let us know!

    - - + + \ No newline at end of file diff --git a/docs/Investigator/Lightsheet/LightsheetCalibration/index.html b/docs/Investigator/Lightsheet/LightsheetCalibration/index.html index 44ea684be..8f962478d 100644 --- a/docs/Investigator/Lightsheet/LightsheetCalibration/index.html +++ b/docs/Investigator/Lightsheet/LightsheetCalibration/index.html @@ -10,13 +10,13 @@ - - + + - - + + \ No newline at end of file diff --git a/docs/Investigator/STORM/Electronics/index.html b/docs/Investigator/STORM/Electronics/index.html index 787637161..d37d4b05b 100644 --- a/docs/Investigator/STORM/Electronics/index.html +++ b/docs/Investigator/STORM/Electronics/index.html @@ -10,13 +10,13 @@ - - + +

    Electronics

    Here we make use of the ESP32 Wemos D1 R32 microcontroller board in combination with the CNC Shield v3. The wiring of the different components is straight forward as the Stepper Motors are attached to the stepper drivers and the Laser is triggered by the SpinEn pin. The NeoPixel LED mounts to the Hold pin.

    Flashing the firmware

    Go to the website https://youseetoo.github.io/ and choose the CNC board as the hardware configuration to flash the latest version of the Firmware. The PS3 controller's MAC address has to be setup with the PS Pairing tool. The actual MAC Address is printed out on the Serial monitor while the Board is booting up.

    - - + + \ No newline at end of file diff --git a/docs/Investigator/STORM/Illumination/index.html b/docs/Investigator/STORM/Illumination/index.html index a30a63367..3fec7f41b 100644 --- a/docs/Investigator/STORM/Illumination/index.html +++ b/docs/Investigator/STORM/Illumination/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -112,7 +112,7 @@ represents 10 µm. Two CCPs have been zoomed in to plot the profiles along the red transparent line. Scale bar for the magnified regions of interest represents 200 nm.

    - - + + \ No newline at end of file diff --git a/docs/Investigator/STORM/Main/index.html b/docs/Investigator/STORM/Main/index.html index 75c849bb3..9dda87266 100644 --- a/docs/Investigator/STORM/Main/index.html +++ b/docs/Investigator/STORM/Main/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -83,7 +83,7 @@

    Mechanical stability of the setup

    See the mechanical stability section of the repository.

    Wide-field imaging, Live-cell imaging, Single molecule applications

    See the Results section of the repository.

    Get Involved

    This project is open so that anyone can get involved. You don't even have to learn CAD designing or programming. Find ways you can contribute in CONTRIBUTING

    License and Collaboration

This project is open-source and is released under the CERN open hardware license. Our aim is to make the kits commercially available. We encourage everyone who is using our Toolbox to share their results and ideas, so that the Toolbox keeps improving. It should serve as an easy-to-use and easy-to-access general-purpose building-block solution for the area of STEAM education. All the design files are generally free, but we would like to hear from you how it is going.

    You're free to fork the project and enhance it. If you have any suggestions to improve it or add any additional functions make a pull-request or file an issue.

    Please find the type of licenses here

    REMARK: All files have been designed using Autodesk Inventor 2019 (EDUCATION)

    Collaborating

    If you find this project useful, please like this repository, follow us on Twitter and cite the webpage or the publication! :-)

    - - + + \ No newline at end of file diff --git a/docs/Investigator/STORM/Results/index.html b/docs/Investigator/STORM/Results/index.html index ad776155d..8619a76a9 100644 --- a/docs/Investigator/STORM/Results/index.html +++ b/docs/Investigator/STORM/Results/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -57,7 +57,7 @@ represents 10 µm. Two CCPs have been zoomed in to plot the profiles along the red transparent line. Scale bar for the magnified regions of interest represents 200 nm.

    - - + + \ No newline at end of file diff --git a/docs/Investigator/STORM/Software/index.html b/docs/Investigator/STORM/Software/index.html index 18661e62f..b6e06d5b0 100644 --- a/docs/Investigator/STORM/Software/index.html +++ b/docs/Investigator/STORM/Software/index.html @@ -10,13 +10,13 @@ - - + +

    Software

For the control and acquisition software, we use ImSwitch. This is an open-source software centered around Napari as a multi-layer viewer and a rich framework for Qt-based widgets. We make use of the open-source localization framework "microEye".

    Installation

    For the installation we advise you to have a look at the ImSwitch repository here https://github.com/kasasxav/ImSwitch/

    After setting up ImSwitch, you can enable STORM reconstruction in real time using the MicroEye Plugin by adding the following configuration to the ImSwitch config file that is located in ~/Documents/ImSwitchConfig/config/imcontrol_options.json

    {
    "setupFileName": "example_uc2_storm_alliedvision.json",
    "recording": {
    "outputFolder": "./ImSwitch/ImSwitch/recordings",
    "includeDateInOutputFolder": true
    },
    "watcher": {
    "outputFolder": "/Users/bene/ImSwitchConfig/scripts"
    }
    }

    The setup file with the actual hardware configuration can be placed here:

    ~/Documents/ImSwitchConfig/imcontrol_setups/example_uc2_storm_alliedvision.json

    {
    "positioners": {
    "ESP32Stage": {
    "managerName": "ESP32StageManager",
    "managerProperties": {
    "rs232device": "ESP32"
    },
    "axes": [
    "X",
    "Y",
    "Z"
    ],
    "forScanning": true,
    "forPositioning": true
    }
    },
    "rs232devices": {
    "ESP32": {
    "managerName": "ESP32Manager",
    "managerProperties": {
    "host_": "192.168.43.129",
    "serialport_windows": "COM5",
"serialport": "/dev/cu.SLAB_USBtoUART"
    }
    }
    },
    "lasers": {
    "488 Laser": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": 1,
    "filter_change": false,
    "laser_despeckle_period": 10,
    "laser_despeckle_amplitude": 0
    },
    "wavelength": 488,
    "valueRangeMin": 0,
    "valueRangeMax": 1024
    },
    "635 Laser": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": 2,
    "filter_change": false,
    "laser_despeckle_period": 10,
    "laser_despeckle_amplitude": 0
    },
    "wavelength": 635,
    "valueRangeMin": 0,
    "valueRangeMax": 1024
    },
    "LED": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": "LED",
    "filter_change": false,
    "filter_axis": 3,
    "filter_position": 32000,
    "filter_position_init": -0
    },
    "wavelength": 635,
    "valueRangeMin": 0,
    "valueRangeMax": 255
    }
    },
    "detectors": {
    "WidefieldCamera": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "AVManager",
    "managerProperties": {
    "cameraListIndex": 1,
    "mocktype": "STORM",
    "mockstackpath": "/Users/bene/Downloads/New_SMLM_datasets/ROI_cos7MT_AF647fluopaint.tif",
    "avcam": {
    "exposure": 0,
    "gain": 0,
    "blacklevel": 100,
    "image_width": 1000,
    "image_height": 1000,
    "pixel_format": "Mono12"
    }
    },
    "forAcquisition": true,
    "forFocusLock": false
    }
    },
    "rois": {
    "Full chip": {
    "x": 600,
    "y": 600,
    "w": 1200,
    "h": 1200
    }
    },
    "LEDMatrixs": {
    "ESP32 LEDMatrix": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDMatrixManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "Nx": 4,
    "Ny": 4
    },
    "wavelength": 488,
    "valueRangeMin": 0,
    "valueRangeMax": 32768
    }
    },
    "autofocus": {
    "camera": "WidefieldCamera",
    "positioner": "ESP32Stage",
    "updateFreq": 10,
    "frameCropx": 780,
    "frameCropy": 400,
    "frameCropw": 500,
    "frameCroph": 100
    },
    "availableWidgets": [
    "Settings",
    "View",
    "Recording",
    "Image",
    "Laser",
    "Positioner",
    "Autofocus",
    "STORMRecon"
    ]
    }

    ImSwitch in Action

    Here you can find a tour on Youtube how to set up everything and what it can do.

    https://www.youtube.com/watch?v=r8f-wmeq5i0

    - - + + \ No newline at end of file diff --git a/docs/Investigator/STORM/Stability/index.html b/docs/Investigator/STORM/Stability/index.html index 4a622e7ae..d16cb7d93 100644 --- a/docs/Investigator/STORM/Stability/index.html +++ b/docs/Investigator/STORM/Stability/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -24,7 +24,7 @@ sigma value of the localized beads over the measurement duration. The sigma value correlates with the defocusing of the beads i.e. low changes in sigma suggest small fluctuations of the samples axial position.

    - - + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/AlignLaser/index.html b/docs/Investigator/XYZMicroscope/AlignLaser/index.html index 156e86abe..db09f35e7 100644 --- a/docs/Investigator/XYZMicroscope/AlignLaser/index.html +++ b/docs/Investigator/XYZMicroscope/AlignLaser/index.html @@ -10,13 +10,13 @@ - - + +
    -

    Aligning the Beamsplitter Cube

The new xyz microscope has a special 2x1 cube that holds the fluorescence optics. Inside, the beamsplitter cube is mounted kinematically and can be adjusted with 3 set screws. It's important that the fiber-coupled laser is focussed / reimaged in the back focal plane of the objective lens. Therefore, we have created a little tutorial to get you started with how this works.

    - - +

    Aligning the Beamsplitter Cube

The new xyz microscope has a special 2x1 cube that holds the fluorescence optics. Inside, the beamsplitter cube is mounted kinematically and can be adjusted with 3 set screws. It's important that the fiber-coupled laser is focussed / reimaged in the back focal plane of the objective lens. Therefore, we have created a little tutorial to get you started with how this works.

    + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/FiveD_v1/index.html b/docs/Investigator/XYZMicroscope/FiveD_v1/index.html index bff5fa233..80d542c84 100644 --- a/docs/Investigator/XYZMicroscope/FiveD_v1/index.html +++ b/docs/Investigator/XYZMicroscope/FiveD_v1/index.html @@ -10,13 +10,13 @@ - - + +
    -

    openUC2 FiveD v1

    Unpacking the microscope

    The hardcover plastic case contains all you need for the microscope:

    • USB micro cable
    • USB3 camera cable
    • 12V power-supply
    • Sweet treat (optional ;D)
    • The actual microscope
    • The objective lens
    • The Illumination unit
    • A heavy Box

    The actual Box looks like this:

    Optional Please also find the treat and make sure you provide yourself with enough sugar throughout this unpacking routine :-)

    The foam holds the microscope in place (the actual colour may differ from what you may see):

    Remove the foam parts (please keep them for later reuse) to end up like this here:

    Getting started

    Mounting the illumination unit

    For this you need a 2.5mm Hex key and the M3 cylindrical screws. Mount the LED Arm like so:

    It should look like this:

    Wiring up the microscope

    First of all we need to wire up the microscope. For this we will start with the 12V power supply. Unfortunately the powersocket is inside the case, hence you have to first eat some candy in order to better find the spot ;-)

    The USB Cable is permanently mounted to the ESP32 UC2e unit:

    Note: Please make sure you have sufficient USB Power. In case the full LED array is turning on, it may happen that the ESP's voltage drops and the USB serial connection fails. A reconnect will help.

    The same holds true for the USB connection to the microcontroller board. You need to hook it up like that:

Once done, we continue with inserting the objective lens. Possibly the lens is already inserted and you just need to check that it is centered correctly

    Wire up the microscope to your computer

    In order to get the microscope working, we first need to install additional drivers. For the Daheng Camera, this would be:

    For additional information and an in-depth explanation for the UC2e system, please have a look here

    - - +

    openUC2 FiveD v1

    Unpacking the microscope

    The hardcover plastic case contains all you need for the microscope:

    • USB micro cable
    • USB3 camera cable
    • 12V power-supply
    • Sweet treat (optional ;D)
    • The actual microscope
    • The objective lens
    • The Illumination unit
    • A heavy Box

    The actual Box looks like this:

    Optional Please also find the treat and make sure you provide yourself with enough sugar throughout this unpacking routine :-)

    The foam holds the microscope in place (the actual colour may differ from what you may see):

    Remove the foam parts (please keep them for later reuse) to end up like this here:

    Getting started

    Mounting the illumination unit

    For this you need a 2.5mm Hex key and the M3 cylindrical screws. Mount the LED Arm like so:

    It should look like this:

    Wiring up the microscope

    First of all we need to wire up the microscope. For this we will start with the 12V power supply. Unfortunately the powersocket is inside the case, hence you have to first eat some candy in order to better find the spot ;-)

    The USB Cable is permanently mounted to the ESP32 UC2e unit:

    Note: Please make sure you have sufficient USB Power. In case the full LED array is turning on, it may happen that the ESP's voltage drops and the USB serial connection fails. A reconnect will help.

    The same holds true for the USB connection to the microcontroller board. You need to hook it up like that:

Once done, we continue with inserting the objective lens. Possibly the lens is already inserted and you just need to check that it is centered correctly

    Wire up the microscope to your computer

    In order to get the microscope working, we first need to install additional drivers. For the Daheng Camera, this would be:

    For additional information and an in-depth explanation for the UC2e system, please have a look here

    + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/FiveD_v2/index.html b/docs/Investigator/XYZMicroscope/FiveD_v2/index.html index aecb73834..147a7033b 100644 --- a/docs/Investigator/XYZMicroscope/FiveD_v2/index.html +++ b/docs/Investigator/XYZMicroscope/FiveD_v2/index.html @@ -10,12 +10,12 @@ - - + +
    -

    openUC2 FiveD v2

    Design Files

    :::warn +

    openUC2 FiveD v2

    Design Files

    :::warn https://github.com/openUC2/openUC2_XYZ_Stagescanning_Microscope/ :::

    Version 2

    This is a slightly updated version of the XYZ microscope that also includes fluorescence imaging. The here presented documentation has an extra feature since it can be operated vertically to image plants growing upwards. Also we use an adapter to mount Ropods magnetically.

    Assembly

    Open the Box:

    Find the microscope and the cables:

    The second layer has the controller and the microscope body: @@ -40,7 +40,7 @@

    Carefully take the lens out:

    In motion:

Vertical Operation

    Troubleshoot

    We learn from mistakes. So lets start learning. The system is fully open, meaning, you can adjust and change the vast majority of the parts on your own. The entire system consists of the openUC2 frame / skeleton and the 3D printed housing to shield it from dust and light. By removing all M3 cylindrical screws, you can detach the housing from the inner structure to eventually repair or alter the system.

You can find a full description of how to disassemble the microscope here: https://openuc2.github.io/docs/PRODUCTION/INVESTIGATOR/ProductionXYZMicroscope

    In Action

    We scanned arabidopsis in darkfield (LEDs >9 on):

Connecting the microscope to the browser and controlling it

    We encourage you to use the UC2ified ImSwitch software to control the microscope. You can find it in this repository: https://github.com/openUC2/ImSwitch/

    However, if you want to quick-start the microscope and see if it works, you can open your browser and use the WEB-Serial interface to interact with the microscope.

    Go to https://youseetoo.github.io/ and connect to your board (most right option saying ESP32 DEV-based UC2 standalone board V2). Select the COM Port which is holding the ESP32 and hit the LOG option, once the dialog opens. The alternative option will help you updating the firmware on the device. An in-depth explanation on how the firmware works can be found here.

    In general, you need to send JSON strings in order to control the system. The strings relevant for the Z-microscope are:

    Home the XY-axis

    It's important to always home the Motors in order to avoid them from getting stuck in an end position (ATTENTION!). The following string will move the motor until the endstop is hit. Afterwards it will release the switch:

    {"task":"/home_act", "home": {"steppers": [{"stepperid":1, "timeout": 2000, "speed": 15000, "direction":1, "endposrelease":3000}]}}

    and

    {"task":"/home_act", "home": {"steppers": [{"stepperid":2, "timeout": 2000, "speed": 15000, "direction":1, "endposrelease":3000}]}}

    Afterwards the internal position is set to 0. You can check that by entering:

    {"task": "/motor_get"}

    Move the Z-axis:

The motor (Nema12) with 200 steps/revolution runs with 16 microsteps and offers a leadscrew with 1mm/revolution. Hence, one step corresponds to 312.5nm. A motor move can be issued with the following command:

    {"task":"/motor_act",
    "motor":
    {
    "steppers": [
    { "stepperid": 3, "position": 1000, "speed": 15000, "isabs": 3, "isaccel":0}
    ]
    }
    }
• stepperid: 3 corresponds to the Z-axis
    • position: steps to go (not physical units!)
    • speed: steps / minute (do not exceed 20000)
    • isabs: absolute or relative motion
    • isaccel: for now, use only non-accelerated motion!

    Safety

    • in case of shattered glass, make sure you don't cut yourself
    • Make sure you don't hurt yourself
    • The moving parts can potentially hurt your finger
    • The electronics - if used in a wrong way - can harm you
    • edges may be sharp, make sure you don't cut yourself
    - - + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/FiveD_v3/index.html b/docs/Investigator/XYZMicroscope/FiveD_v3/index.html index 98572db82..c7394152a 100644 --- a/docs/Investigator/XYZMicroscope/FiveD_v3/index.html +++ b/docs/Investigator/XYZMicroscope/FiveD_v3/index.html @@ -10,12 +10,12 @@ - - + +
    -

    openUC2 FiveD v3

    Design Files

    :::warn +

    openUC2 FiveD v3

    Design Files

    :::warn https://github.com/openUC2/openUC2_XYZ_Stagescanning_Microscope/ :::

    Version 3

This is a novel version of the XYZ microscope that also includes fluorescence imaging. It is not based on the UC2 cubes anymore but features a monolithic body that is still 3D printed.

More information is coming soon!

    Introduction

    A first video explains the basic operation of the device

    Software

    The device runs using ImSwitch with the following Configuration. Find the ImSwitch Installer here: https://github.com/openUC2/ImSwitchInstaller/releases/tag/v0.0.2 (go to latest release)

    Config

    {
    "positioners": {
    "ESP32Stage": {
    "managerName": "ESP32StageManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "isEnable": true,
    "enableauto": false,
    "stepsizeX": -0.3125,
    "stepsizeY": -0.3125,
    "stepsizeZ": 0.3125,
    "homeSpeedX": 15000,
    "homeSpeedY": 15000,
    "homeSpeedZ": 15000,
    "isDualaxis": true,
    "homeDirectionX": 1,
    "backlashXOld": 15,
    "backlashYOld": 40,
    "backlashX": 0,
    "backlashY": 0,
    "homeEndstoppolarityY": 0,
    "homeDirectionY": -1,
    "homeDirectionZ": 0,
    "homeXenabled": 1,
    "homeYenabled": 1,
    "homeZenabled": 0,
    "initialSpeed": {
    "X": 15000,
    "Y": 15000,
    "Z": 15000
    }
    },
    "axes": [
    "X",
    "Y",
    "Z"
    ],
    "forScanning": true,
    "forPositioning": true
    }
    },
    "rs232devices": {
    "ESP32": {
    "managerName": "ESP32Manager",
    "managerProperties": {
    "host_": "192.168.43.129",
    "serialport": "COM5",
    "baudrate":115200,
    "debug":1
    }
    }
    },
    "lasers": {
    "LED": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": 1
    },
    "wavelength": 0,
    "valueRangeMin": 0,
    "valueRangeMax": 1023
    },
    "Laser 488": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": 2
    },
    "wavelength": 635,
    "valueRangeMin": 0,
    "valueRangeMax": 1023
    },
    "Laser 635": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "ESP32LEDLaserManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "channel_index": 3
    },
    "wavelength": 488,
    "valueRangeMin": 0,
    "valueRangeMax": 1023
    }

    },
    "detectors": {
    "WidefieldCamera": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "HikCamManager",
    "managerProperties": {
    "isRGB": 1,
    "cameraListIndex": 0,
    "cameraEffPixelsize": 0.2257,
    "hikcam": {
    "exposure": 0,
    "gain": 0,
    "blacklevel": 100,
    "image_width": 1000,
    "image_height": 1000
    }
    },
    "forAcquisition": true,
    "forFocusLock": true
    },
    "Observer": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "OpenCVCamManager",
    "managerProperties": {
    "cameraListIndex": 1,
    "cameraListIndexWIN": 0,
    "isRGB":1,
    "opencvcam": {
    "exposure": 10
    }
    },
    "forAcquisition": true
    }
    },
    "autofocus": {
    "camera": "WidefieldCamera",
    "positioner": "ESP32Stage",
    "updateFreq": 10,
    "frameCropx": 780,
    "frameCropy": 400,
    "frameCropw": 500,
    "frameCroph": 100
    },
    "mct": {
    "monitorIdx": 2,
    "width": 1080,
    "height": 1920,
    "wavelength": 0,
    "pixelSize": 0,
    "angleMount": 0,
    "patternsDirWin": "C:\\Users\\wanghaoran\\Documents\\ImSwitchConfig\\imcontrol_slm\\488\\",
    "patternsDir": "/users/bene/ImSwitchConfig/imcontrol_sim/488"
    },
    "PixelCalibration": {},
    "focusLock": {
    "camera": "WidefieldCamera",
    "positioner": "ESP32StageManager",
    "updateFreq": 4,
    "frameCropx": 0,
    "frameCropy": 0,
    "frameCropw": 0,
    "frameCroph": 0
    },
    "availableWidgets": [
    "Settings",
    "View",
    "Recording",
    "Image",
    "Laser",
    "Positioner",
    "Autofocus",
    "MCT",
    "UC2Config",
    "ImSwitchServer",
    "PixelCalibration",
    "HistoScan",
    "ROIScan"
    ],
    "nonAvailableWidgets": [
    "STORMRecon",
    "DPC",
    "Hypha",
    "FocusLock",
    "HistoScan",
    "FocusLock",
    "FOVLock"
    ]
    }

    Assembly Instructions

    Assembly of the Optics Cube

    To begin the assembly of the microscope, the following components are needed:

    • The "ESP32 DEV-based UC2 standalone board V3"
    • Screws
    • Housing parts

    Electronic components including a development board

    Mount the middle plate with M3x12 screws:

    Mounting the middle plate with screws

    Ensure the screws are fixed properly on both sides. Also, cover the screws with non-conductive tape to prevent any shorts on the ESP32-based board side:

    Covering screws with non-conductive tape

    The top plate is attached with 8 screws on the red edges. Then, flip the microscope and attach the bottom plate with adhesive feet. For better stability, it is recommended to use 4 adhesive feet. Then the microscope can be placed upright again.

    Attaching the top plate with screws Adhesive feet on the bottom plate

    Assembly of the Optics Module (e.g., Camera)

    Next, you will need the following parts:

    • Camera
    • Mirror
    • Parts for the camera module

    The mirror is placed in the module.

    Placing the mirror in the module @@ -33,7 +33,7 @@ the second spring ball now plays along

    The last two fixed balls find their groove almost immediately before the front ball moves up its ramp to the pins

    Final position. All fixed balls in their "pin yokes" and both spring balls press the optics module against them.

    Final result

    Safety

    • in case of shattered glass, make sure you don't cut yourself
    • Make sure you don't hurt yourself
    • The moving parts can potentially hurt your finger
    • The electronics - if used in a wrong way - can harm you
    • edges may be sharp, make sure you don't cut yourself
    - - + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/HistoScan/index.html b/docs/Investigator/XYZMicroscope/HistoScan/index.html index e899b6622..06ce6838c 100644 --- a/docs/Investigator/XYZMicroscope/HistoScan/index.html +++ b/docs/Investigator/XYZMicroscope/HistoScan/index.html @@ -10,15 +10,15 @@ - - + +
    -

    Histo Scanner Plugin Documentation

    Welcome to the documentation page for the Histo Scanner Plugin, a powerful tool for scanning large areas and stitching images onto a large canvas. This page provides detailed information on how to configure and use the plugin effectively.

    Overview

    The Histoscanner Plugin integrates with the ImSwitch widget and controller to facilitate the scanning of large sample areas. Users can select a sample geometry and initiate scanning, which captures images and stitches them together to form a comprehensive view.

    Initial Setup

    Before starting a scan, ensure the following settings are configured correctly:

    • Pixel Size: Set in the setup.json file. This size must be calibrated, possibly using a ruler.
    • Step Size of Axis: Also set in the setup.json. It typically depends on the steps/mm defined by the leadscrew.
    • Sample Configuration File: An example file can be found here.

    Scanning Process

    The microscope will compute the scan area and the necessary scan stepsize on its own and will perform a snake scan. Alternatively you can provide a list of coordinates.

    Once the scan is successfully initiated, the final output is displayed in a downscaled version on napari to conserve memory.

    ImSwitch Configuration

    The configuration settings for the detector and stage are crucial. Here are the JSON settings for both:

    For the Stage

      "positioners": {
    "ESP32Stage": {
    "managerName": "ESP32StageManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "isEnable": true,
    "enableauto": false,
    "stepsizeX": -0.3125,
    "stepsizeY": -0.3125,
    "stepsizeZ": 0.3125,
    "homeSpeedX": 15000,
    "homeSpeedY": 15000,
    "homeSpeedZ": 15000,
    "isDualaxis": true,
    "homeDirectionX": 1,
    "backlashXOld": 15,
    "backlashYOld": 40,
    "backlashX": 0,
    "backlashY": 0,
    "homeEndstoppolarityY": 0,
    "homeDirectionY": -1,
    "homeDirectionZ": 0,
    "homeXenabled": 1,
    "homeYenabled": 1,
    "homeZenabled": 0,
    "initialSpeed": {
    "X": 15000,
    "Y": 15000,
    "Z": 15000
    }
    },
    "axes": [
    "X",
    "Y",
    "Z"
    ],
    "forScanning": true,
    "forPositioning": true
    }
    }

    For the Detector

      "detectors": {
    "WidefieldCamera": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "HikCamManager",
    "managerProperties": {
    "isRGB": 1,
    "cameraListIndex": 0,
    "cameraEffPixelsize": 0.2257,
    "hikcam": {
    "exposure": 0,
    "gain": 0,
    "blacklevel": 100,
    "image_width": 1000,
    "image_height": 1000
    }
    },
    "forAcquisition": true,
    "forFocusLock": true
    }

    File Handling

    • Storing Metadata: All metadata is stored in the OME.TIF format.
    • Opening in Fiji: Files can be easily opened and stitched in Fiji by importing them as OME.TIF.
    • Opening in ASHLAR: Use the script developed during the openUC2 hackathon available here as a starting point for handling files in Ashlar.

    Hardware/Software Setup

    Correct orientation of the stage coordinates and camera coordinates is essential. The configuration ensures that the camera orientation matches the stage scanning positions.

In order to have correct orientation it's important that the stage coordinates and the camera coordinates are matching. The below image shows how the camera has to be oriented w.r.t. the stage scanning positions +

    Histo Scanner Plugin Documentation

    Welcome to the documentation page for the Histo Scanner Plugin, a powerful tool for scanning large areas and stitching images onto a large canvas. This page provides detailed information on how to configure and use the plugin effectively.

    Overview

    The Histoscanner Plugin integrates with the ImSwitch widget and controller to facilitate the scanning of large sample areas. Users can select a sample geometry and initiate scanning, which captures images and stitches them together to form a comprehensive view.

    Initial Setup

    Before starting a scan, ensure the following settings are configured correctly:

    • Pixel Size: Set in the setup.json file. This size must be calibrated, possibly using a ruler.
    • Step Size of Axis: Also set in the setup.json. It typically depends on the steps/mm defined by the leadscrew.
    • Sample Configuration File: An example file can be found here.

    Scanning Process

    The microscope will compute the scan area and the necessary scan stepsize on its own and will perform a snake scan. Alternatively you can provide a list of coordinates.

    Once the scan is successfully initiated, the final output is displayed in a downscaled version on napari to conserve memory.

    ImSwitch Configuration

    The configuration settings for the detector and stage are crucial. Here are the JSON settings for both:

    For the Stage

      "positioners": {
    "ESP32Stage": {
    "managerName": "ESP32StageManager",
    "managerProperties": {
    "rs232device": "ESP32",
    "isEnable": true,
    "enableauto": false,
    "stepsizeX": -0.3125,
    "stepsizeY": -0.3125,
    "stepsizeZ": 0.3125,
    "homeSpeedX": 15000,
    "homeSpeedY": 15000,
    "homeSpeedZ": 15000,
    "isDualaxis": true,
    "homeDirectionX": 1,
    "backlashXOld": 15,
    "backlashYOld": 40,
    "backlashX": 0,
    "backlashY": 0,
    "homeEndstoppolarityY": 0,
    "homeDirectionY": -1,
    "homeDirectionZ": 0,
    "homeXenabled": 1,
    "homeYenabled": 1,
    "homeZenabled": 0,
    "initialSpeed": {
    "X": 15000,
    "Y": 15000,
    "Z": 15000
    }
    },
    "axes": [
    "X",
    "Y",
    "Z"
    ],
    "forScanning": true,
    "forPositioning": true
    }
    }

    For the Detector

      "detectors": {
    "WidefieldCamera": {
    "analogChannel": null,
    "digitalLine": null,
    "managerName": "HikCamManager",
    "managerProperties": {
    "isRGB": 1,
    "cameraListIndex": 0,
    "cameraEffPixelsize": 0.2257,
    "hikcam": {
    "exposure": 0,
    "gain": 0,
    "blacklevel": 100,
    "image_width": 1000,
    "image_height": 1000
    }
    },
    "forAcquisition": true,
    "forFocusLock": true
    }

    File Handling

    • Storing Metadata: All metadata is stored in the OME.TIF format.
    • Opening in Fiji: Files can be easily opened and stitched in Fiji by importing them as OME.TIF.
    • Opening in ASHLAR: Use the script developed during the openUC2 hackathon available here as a starting point for handling files in Ashlar.

    Hardware/Software Setup

    Correct orientation of the stage coordinates and camera coordinates is essential. The configuration ensures that the camera orientation matches the stage scanning positions.

In order to have correct orientation it's important that the stage coordinates and the camera coordinates are matching. The below image shows how the camera has to be oriented w.r.t. the stage scanning positions

    Tutorials and Demonstrations

    • Tutorial on Matching Axes: A tutorial explaining the matching of different axes is available on YouTube.
    • Full Plugin in Action: Watch the plugin in action here.

Feel free to reach out with any queries or suggestions to enhance this documentation. Happy scanning with Histo Scanner!

    Tutorial that explains how the different axes can be matched

    The Full plugin in action

    - - + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/MCTPlugin/index.html b/docs/Investigator/XYZMicroscope/MCTPlugin/index.html index 537d17e82..c6c43a747 100644 --- a/docs/Investigator/XYZMicroscope/MCTPlugin/index.html +++ b/docs/Investigator/XYZMicroscope/MCTPlugin/index.html @@ -10,13 +10,13 @@ - - + + - - +
    + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/ROIScanner/index.html b/docs/Investigator/XYZMicroscope/ROIScanner/index.html index 916c2e971..d8d4864fb 100644 --- a/docs/Investigator/XYZMicroscope/ROIScanner/index.html +++ b/docs/Investigator/XYZMicroscope/ROIScanner/index.html @@ -10,13 +10,13 @@ - - + +
    -

    ROI Scanner

    Starting ImSwitch on Ubuntu and Start the ROI Scanner

    First of all: Open the terminal. Type the following (all case sensitive):

    conda activate imswitch

    sudo chown user:user /dev/ttyUSB0 # where user is the current user you're logged into (then enter password)

    The USB port may differ, so perhaps also try this:

    sudo chown veo:veo /dev/ttyUSB1
    sudo chown veo:veo /dev/ttyUSB2

    Then:

    python -m imswitch

    The images are stored in the working directory of the terminal.

    - - +

    ROI Scanner

    Starting ImSwitch on Ubuntu and Start the ROI Scanner

    First of all: Open the terminal. Type the following (all case sensitive):

    conda activate imswitch

    sudo chown user:user /dev/ttyUSB0 # where user is the current user you're logged into (then enter password)

    The USB port may differ, so perhaps also try this:

    sudo chown veo:veo /dev/ttyUSB1
    sudo chown veo:veo /dev/ttyUSB2

    Then:

    python -m imswitch

    The images are stored in the working directory of the terminal.

    + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/SetupPhasecontrast/index.html b/docs/Investigator/XYZMicroscope/SetupPhasecontrast/index.html index b288bfe49..1cf1fdc53 100644 --- a/docs/Investigator/XYZMicroscope/SetupPhasecontrast/index.html +++ b/docs/Investigator/XYZMicroscope/SetupPhasecontrast/index.html @@ -10,12 +10,12 @@ - - + +
    -

    openUC2 Phase-Contrast Setup Tutorial

    Introduction: Understanding Phase-Contrast Microscopy

    Phase-contrast works by employing a unique ring-shaped illumination to highlight the specimen. The objective lens then features a corresponding ring-shaped phase apparatus located in its back focal plane. Light that doesn't scatter off the specimen is delayed and attenuated at the phase plate. This ensures that only scattered light is phase-shifted, leading to enhanced image contrast.

    For those who want a deeper dive into the exact mechanics of Zernike phase-contrast, we recommend consulting external references. UC2 system's phase-contrast doesn't rely on traditional lens systems. Instead, a LED ring comprising three concentrically arranged rings facilitates the ring-shaped illumination. A specialized phase-contrast microscope objective, embedded with a positive phase ring in its rear focal plane, facilitates the necessary shift of the unscattered light.

    Now, let’s get into the practical steps of setting up the UC2 system for phase-contrast.

    Setting Up Phase-Contrast in the UC2 System

    1. Mount the LED Light Source
      +

      openUC2 Phase-Contrast Setup Tutorial

      Introduction: Understanding Phase-Contrast Microscopy

      Phase-contrast works by employing a unique ring-shaped illumination to highlight the specimen. The objective lens then features a corresponding ring-shaped phase apparatus located in its back focal plane. Light that doesn't scatter off the specimen is delayed and attenuated at the phase plate. This ensures that only scattered light is phase-shifted, leading to enhanced image contrast.

      For those who want a deeper dive into the exact mechanics of Zernike phase-contrast, we recommend consulting external references. UC2 system's phase-contrast doesn't rely on traditional lens systems. Instead, a LED ring comprising three concentrically arranged rings facilitates the ring-shaped illumination. A specialized phase-contrast microscope objective, embedded with a positive phase ring in its rear focal plane, facilitates the necessary shift of the unscattered light.

      Now, let’s get into the practical steps of setting up the UC2 system for phase-contrast.

      Setting Up Phase-Contrast in the UC2 System

      1. Mount the LED Light Source
        Begin by placing the LED light source above your sample.

      2. Detach the Camera
        Unscrew the camera from the metal flange.

      3. Turn on the Outer Ring
        Using our website interface, activate the outer LED ring. Looking through the lens at the location where the camera was mounted, you should be able to see this ring.

      4. Align the Illumination Ring with the Objective’s Phase Ring
        The cheek cells are relatively thick and cause some additional image degradation (i.e. multiple scattering)

        Using only the green line of the ring improves overall contrast and reduces the artifact

        Multiple cell layers visualized with the UC2 XYZ phase-contrast microscope

    - - + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/SetupTubelens/index.html b/docs/Investigator/XYZMicroscope/SetupTubelens/index.html index d76b6196b..4d320bb25 100644 --- a/docs/Investigator/XYZMicroscope/SetupTubelens/index.html +++ b/docs/Investigator/XYZMicroscope/SetupTubelens/index.html @@ -10,15 +10,15 @@ - - + +
    -

    openUC2 Setting up the tube lens

    Introduction and Explanation

    The tube lens in the openUC2 setup is a critical component responsible for transforming an object that is far away (approximately infinitely far away) into an image on the camera sensor. Proper calibration of the tube lens is essential to ensure that it produces a sharp and clear image. In this documentation, we will guide you through the process of setting up the tube lens for your openUC2 system.

    To achieve optimal performance, you will need to follow these key steps:

    1. Adjust the CCTV lens to focus on objects at infinity.
    2. Fully open the aperture to maximize light collection.

    Additionally, there is a small tool called "the crown" that facilitates the alignment of the ring components. You can download "the crown" tool from this link.

    Procedure

    Follow these steps to set up the tube lens for your openUC2 system:

    1. Mount the Camera

    Begin by mounting the camera using the C-mount onto the CCTV lens. Ensure that you include the spacer ring to align the image plane of the CCTV lens with the sensor plane of the camera.

    2. Adjust the Image Plane

    Make sure that the camera is securely attached to the CCTV lens, preventing it from turning or becoming loose.

    3. Install "The Crown"

    Insert "the crown" tool between the camera and the CCTV lens. This tool will assist in aligning the rings correctly.

    4. Secure the Camera

    Turn the camera clockwise, typically 2-3 full rotations, to fully secure it to the CCTV lens.

    5. Open the Aperture

    Rotate the camera counter-clockwise until the aperture is fully open. Ensure that the camera is parallel to the ground during this step to maximize light intake.

    6. Focus to Infinity

    Now, use "the crown" tool to turn the outer focus ring of the CCTV lens counter-clockwise. This adjustment will ensure that the CCTV lens is focused at infinity.

    By following these steps, you will successfully set up the tube lens for your openUC2 system, allowing it to capture sharp and well-focused images. Remember to check and fine-tune your settings as needed to achieve the best results for your specific imaging requirements.

    - - +web-share">

    Introduction and Explanation

    The tube lens in the openUC2 setup is a critical component responsible for transforming an object that is far away (approximately infinitely far away) into an image on the camera sensor. Proper calibration of the tube lens is essential to ensure that it produces a sharp and clear image. In this documentation, we will guide you through the process of setting up the tube lens for your openUC2 system.

    To achieve optimal performance, you will need to follow these key steps:

    1. Adjust the CCTV lens to focus on objects at infinity.
    2. Fully open the aperture to maximize light collection.

    Additionally, there is a small tool called "the crown" that facilitates the alignment of the ring components. You can download "the crown" tool from this link.

    Procedure

    Follow these steps to set up the tube lens for your openUC2 system:

    1. Mount the Camera

    Begin by mounting the camera using the C-mount onto the CCTV lens. Ensure that you include the spacer ring to align the image plane of the CCTV lens with the sensor plane of the camera.

    2. Adjust the Image Plane

    Make sure that the camera is securely attached to the CCTV lens, preventing it from turning or becoming loose.

    3. Install "The Crown"

    Insert "the crown" tool between the camera and the CCTV lens. This tool will assist in aligning the rings correctly.

    4. Secure the Camera

    Turn the camera clockwise, typically 2-3 full rotations, to fully secure it to the CCTV lens.

    5. Open the Aperture

    Rotate the camera counter-clockwise until the aperture is fully open. Ensure that the camera is parallel to the ground during this step to maximize light intake.

    6. Focus to Infinity

    Now, use "the crown" tool to turn the outer focus ring of the CCTV lens counter-clockwise. This adjustment will ensure that the CCTV lens is focused at infinity.

    By following these steps, you will successfully set up the tube lens for your openUC2 system, allowing it to capture sharp and well-focused images. Remember to check and fine-tune your settings as needed to achieve the best results for your specific imaging requirements.

    + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/SmartMicroscopy/index.html b/docs/Investigator/XYZMicroscope/SmartMicroscopy/index.html new file mode 100644 index 000000000..1f04052f7 --- /dev/null +++ b/docs/Investigator/XYZMicroscope/SmartMicroscopy/index.html @@ -0,0 +1,22 @@ + + + + + +Smart Microscopy Using openUC2 and ImSwitch | openUC2 Documentation + + + + + + + + + + +
    +

    Smart Microscopy Using openUC2 and ImSwitch

    This tutorial will guide you through setting up a smart microscopy workflow using the openUC2 microscope and the ImSwitch software. We will perform a closed-loop experiment where the microscope follows a line based on image processing results.

    Prerequisites

    1. ImSwitch Software: Ensure that ImSwitch is running and accessible. For example, if running on the same computer, the URL might be https://localhost:8002 (check logs for the exact port).
    2. SSL Certificate: Access the REST API (e.g., https://localhost:8002/docs) in a browser and accept the security warning to use the web viewer (https://youseetoo.github.io/imswitch/index.html). Enter the URL and port under connections.

    Closed-Loop Feedback Pipeline

    The pipeline will:

    1. Snap an image
    2. Create a background image
    3. Subtract the background
    4. Compute edges using the Canny filter
    5. Perform Hough transform to find straight lines
    6. Determine the mean orientation of the lines
    7. Compute the next XY coordinate to move
    8. Return to the initial position

    Installation

    Install the necessary package:

    pip install https://github.com/openUC2/imswitchclient/archive/refs/heads/main.zip

    Code Implementation

    You can run the following code in a Jupyter notebook or Visual Studio Code. Adjust the client initialization to match your setup.

    # Load dependencies
    import cv2
    import numpy as np
    import tifffile as tif
    import matplotlib.pyplot as plt
    import os
    import imswitchclient.ImSwitchClient as imc
    import numpy as np
    import matplotlib.pyplot as plt
    import time
    from simple_pid import PID

    # Setup PID controller
    controller = PID(2, 0.1, 2)
    controller.send(None)

    # Initialize the client
    client = imc.ImSwitchClient(host="192.168.137.1", port=8002)

    # Retrieve the first positioner's name and current position
    positioner_names = client.positionersManager.getAllDeviceNames()
    positioner_name = positioner_names[0]
    currentPositions = client.positionersManager.getPositionerPositions()[positioner_name]
    initialPosition = (currentPositions["X"], currentPositions["Y"])

    # Loop through the process
    for iimage in range(10):
    # Snap image
    scalingFactor = .5
    pixel_to_stage = 1 / scalingFactor
    gaussianKernel = 201
    print("Taking image")
    iImage = client.recordingManager.snapNumpyToFastAPI(scalingFactor)
    mCrop = np.max(iImage.shape)
    Ny, Nx = iImage.shape

    # Remove background
    mBackground = cv2.GaussianBlur(iImage, (gaussianKernel, gaussianKernel), 0)
    iImage = iImage / mBackground
    iImage = iImage[Nx//2-mCrop:Nx//2+mCrop, Ny//2-mCrop:Ny//2+mCrop]

    # Process image
    image = np.uint8(iImage * 255)[:, :, np.newaxis]
    image[image > 100] = 0
    edges = cv2.Canny(image, 50, 150, apertureSize=3)
    lines = cv2.HoughLines(edges, 1, np.pi / 180, 100)

    # Calculate main orientation
    angles = [np.degrees(theta) for rho, theta in lines[:, 0]] if lines is not None else []
    main_orientation = np.mean(angles)
    dy = np.cos(np.radians(main_orientation)) * Nx / 2
    dx = np.sin(np.radians(main_orientation)) * Ny / 2

    # Handle NaN values
    dx = dx if not np.isnan(dx) else np.random.randint(-100, 100)
    dy = dy if not np.isnan(dy) else np.random.randint(-100, 100)

    newPosition = (dx * pixel_to_stage, dy * pixel_to_stage)
    print(f"We are moving the microscope in x:/y: {round(newPosition[0], 2)} / {round(newPosition[1], 2)}")

    client.positionersManager.movePositioner(positioner_name, "X", newPosition[0], is_absolute=False, is_blocking=True)
    client.positionersManager.movePositioner(positioner_name, "Y", newPosition[1], is_absolute=False, is_blocking=True)

    # Return to the initial position
    client.positionersManager.movePositioner(positioner_name, "X", initialPosition[0], is_absolute=True, is_blocking=True)
    client.positionersManager.movePositioner(positioner_name, "Y", initialPosition[1], is_absolute=True, is_blocking=True)

    Result

    The microscope will follow a line for 10 steps and then return to the initial position.

    Result

    This workflow demonstrates a basic smart microscopy setup using openUC2 and ImSwitch, allowing for closed-loop experiments based on real-time image processing. Adapt and expand this pipeline for your specific experiments and applications.

    + + + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/StageCalibration/index.html b/docs/Investigator/XYZMicroscope/StageCalibration/index.html index da8120e37..250faaffe 100644 --- a/docs/Investigator/XYZMicroscope/StageCalibration/index.html +++ b/docs/Investigator/XYZMicroscope/StageCalibration/index.html @@ -10,15 +10,15 @@ - - + +
    -

    Stage Mapping and Stage Calibration

    Stage Coordinates

    In this tutorial, we will guide you through the process of aligning the coordinate systems for the UC2 microscope stage. Proper alignment ensures that the movement of the stage corresponds accurately with the image displayed on the screen, facilitating an intuitive user experience. In principle all of this can be handled in software (e.g. flipping the camera image, changing stage axis), but it's always good to start with a common ground from the hardware side.

    +

    Stage Mapping and Stage Calibration

    Stage Coordinates

    In this tutorial, we will guide you through the process of aligning the coordinate systems for the UC2 microscope stage. Proper alignment ensures that the movement of the stage corresponds accurately with the image displayed on the screen, facilitating an intuitive user experience. In principle all of this can be handled in software (e.g. flipping the camera image, changing stage axis), but it's always good to start with a common ground from the hardware side.

    This is the microscope (UC2 XYZ v3) with the ingredients controlled by ImSwitch

    Alignment of Axes

    The goal of aligning the coordinate systems is to ensure they are correctly matched. The alignment of the stage is considered from the origin point (zero point). The desired behavior is as follows:

    • When the stage moves to the right (x+), the image on the screen should also move to the right.
    • When the stage moves upwards (y+), the image on the screen should move upwards as well.

    This is illustrated in the following Figure. When viewing the sample from above with the microscope positioned in front, the image should match what is shown in Imswitch. This is also represented by the VirtualMicroscope with the VirtualStage and VirtualCamera in this config (Config: https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json).

    Understanding Axes in NumPy

    It's important to note the labeling of axes. In NumPy, x = 1 and y = 0. This means:

    • The x-axis is the second axis (index 1) of an array.
    • The y-axis is the first axis (index 0) of an array.

    NumPy arrays are multidimensional, with axes numbered as follows:

    • Axis 0 is the first axis (often the vertical direction).
    • Axis 1 is the second axis (often the horizontal direction).

    Stage Coordinate System

    When viewing the stage from above, the coordinate system is arranged as follows:

    • X-Axis (Horizontal)
      • Positive direction: Right
      • Negative direction: Left
    • Y-Axis (Vertical)
      • Positive direction: Up
      • Negative direction: Down

    Aligning Axes with Imswitch

    To enable intuitive operation, the stage and camera axes must be correctly aligned with the coordinate system in Imswitch. To achieve this, the commands "flip x" and "flip y" are used. These commands invert the direction of the axes in the coordinate system, meaning that movement or position along the axes is reversed.

    Steps for Aligning Axes:

    1. Initial Setup:

      • Ensure the microscope and stage are properly connected to the control software (e.g., Imswitch).
    2. Define Origin:

      • Identify the origin (zero point) of the stage coordinate system. (in Hardware this would be defined by the Endstops that are used for homing the axes; The motor will run - if the direction is set correctly - until it hits the switch)
    3. Test Movement:

      • Move the stage to the right and observe the direction of the image on the screen.
        • If the image moves left, apply the "flip x" command.
      • Move the stage upwards and observe the direction of the image on the screen.
        • If the image moves down, apply the "flip y" command.
    4. Adjust Axes:

      • Use the following commands as needed to align the axes:

        # Flip the x-axis if necessary
        if x_movement_incorrect:
        stage.flip_x()

        # Flip the y-axis if necessary
        if y_movement_incorrect:
        stage.flip_y()
    5. Verify Alignment:

      • After applying the flips, verify that the stage movements correspond correctly with the image movements on the screen.
    6. Save Configuration:

      • Save the configuration settings to ensure the alignment persists across sessions.

    Stage Calibration

    Richard Bowman and his team provided a very nice way to calibrate stage coordinates to camera pixel coordinates. We brutally integrated the open-source software which you can find here: https://gitlab.com/openflexure/microscope-extensions/camera-stage-mapping into ImSwitch. If you activate the HistoScan Controller and Widget you can start it either by the GUI or using the HTTP interface by calling http://localhost:8002/HistoScanController/startStageMapping (URL and PORT may differ). The stage will move through a certain series of steps in XY, perform a cross-correlation of the images, compute the shift in XY of the microscope image on the camera, compare it to the expected shift, and return the Image-To-Stage-Displacement Matrix as well as the backlash vector. Both matrices/vectors are microscope specific and will help you match e.g. stage coordinates for stitching software such as ASHLAR or OFM Stitching. This document should give you a rough idea of what's happening.

    Some terminology:

    • Combine X and Y calibrations: The calibration involves combining two separate measurements or calibration runs for the x and y directions, ensuring that the directions are orthogonal (at right angles to each other).

    • 2x2 transformation matrix: The image_to_stage_displacement matrix maps image displacements to stage displacements. This ensures that movements in the image coordinate system are accurately translated to movements in the stage coordinate system.

    • backlash_vector: This is a vector estimating the backlash (mechanical slack or play in the system) in each direction. In this case, the estimated backlash is zero, indicating a precise calibration with no noticeable mechanical play.

    • backlash: The function is expected to return the highest element of backlash_vector as a scalar value, which would be zero in this case.

    Interpretation of the Matrix

    Calibration Matrix image_to_stage_displacement

    The entries of the calibration matrix image_to_stage_displacement can be given specific names and meanings based on their positions within the matrix. Let's denote the matrix as follows:

    \text{image\_to\_stage\_displacement} = \begin{pmatrix} 0 & -1.0 \\ -1.0 & 0 \end{pmatrix}
    \begin{pmatrix} a & b \\ c & d \end{pmatrix}

    Entries and Their Names

    1. a (0):

      • Name: a
      • Meaning: Represents the scaling factor from the x-coordinate in the image to the x-coordinate in the stage. Here, it is 0, indicating no direct mapping from image x to stage x.
    2. b (-1.0):

      • Name: b
      • Meaning: Represents the scaling factor from the y-coordinate in the image to the x-coordinate in the stage. The value -1.0 indicates an inverse and slightly scaled mapping from image y to stage x.
    3. c (-1.0):

      • Name: c
      • Meaning: Represents the scaling factor from the x-coordinate in the image to the y-coordinate in the stage. The value -1.0 indicates an inverse and slightly scaled mapping from image x to stage y.
    4. d (0):

      • Name: d
      • Meaning: Represents the scaling factor from the y-coordinate in the image to the y-coordinate in the stage. Here, it is 0, indicating no direct mapping from image y to stage y.

    Summary of the Matrix Entries

    • a (0): No direct mapping from image x to stage x.
    • b (-1.0): Inverse mapping from image y to stage x.
    • c (-1.0): Inverse mapping from image x to stage y.
    • d (0): No direct mapping from image y to stage y.

    This calibration matrix indicates that there is a transformation involving a 90-degree rotation combined with a negative scaling factor of magnitude close to 1 between the image coordinates and the stage coordinates. The exact interpretation may depend on the specific application, but generally, it implies that movements in one direction in the image are mapped to movements in the perpendicular direction on the stage with a slight scaling adjustment.

    Interpretation of the Values

    The following simulation of the VirtualMicroscope inside ImSwitch (Config: https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json):

    The result of the stage mapping is a json file containing (under /ImSwitch/calibFile.json) the following important element:

        "camera_stage_mapping_calibration": {
    "backlash": 0.0,
    "backlash_vector": [
    0.0,
    0.0,
    0.0
    ],
    "image_to_stage_displacement": [
    [
    0.0,
    -1.0
    ],
    [
    -1.0,
    0.0
    ]
    ]
    }

    The provided matrices explain how to transform image coordinates to stage coordinates and how to estimate backlash. Let's break down the interpretation of the entries:

    1. image_to_stage_displacement Matrix

    mData["camera_stage_mapping_calibration"]["image_to_stage_displacement"] =
    array([[ 0. , -1.00135997],
    [-1.00135997, 0. ]])

    This matrix is a 2x2 transformation matrix used to map image coordinates to stage coordinates. Each entry in this matrix has a specific meaning:

    • [0,0] = 0: There is no direct transformation of the x-coordinate in the image to the x-coordinate in the stage.
    • [0,1] = -1.00135997: The y-coordinate in the image inversely affects the x-coordinate in the stage.
    • [1,0] = -1.00135997: The x-coordinate in the image inversely affects the y-coordinate in the stage.
    • [1,1] = 0: There is no direct transformation of the y-coordinate in the image to the y-coordinate in the stage.

    The presence of -1.00135997 off-diagonal elements indicates that the transformation involves a negative and approximately unit scaling between the coordinates, implying a possible 90-degree rotation combined with a scaling factor close to -1.

    2. backlash_vector Matrix

    mData["camera_stage_mapping_calibration"]["backlash_vector"] =
    array([ 0., 0., 0.])

    This vector represents the estimated backlash in each direction (x, y, and possibly z, though z is not utilized in a 2D context). Here, all elements are zero, indicating no measurable backlash in the x and y directions.

    This is also represented by the VirtualMicroscope with the VirtualStage and VirtualCamera in this config (Config: https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json).

    Understanding Axes in NumPy

    It's important to note the labeling of axes. In NumPy, x = 1 and y = 0. This means:

    • The x-axis is the second axis (index 1) of an array.
    • The y-axis is the first axis (index 0) of an array.

    NumPy arrays are multidimensional, with axes numbered as follows:

    • Axis 0 is the first axis (often the vertical direction).
    • Axis 1 is the second axis (often the horizontal direction).

    Stage Coordinate System

    When viewing the stage from above, the coordinate system is arranged as follows:

    • X-Axis (Horizontal)
      • Positive direction: Right
      • Negative direction: Left
    • Y-Axis (Vertical)
      • Positive direction: Up
      • Negative direction: Down

    Aligning Axes with Imswitch

    To enable intuitive operation, the stage and camera axes must be correctly aligned with the coordinate system in Imswitch. To achieve this, the commands "flip x" and "flip y" are used. These commands invert the direction of the axes in the coordinate system, meaning that movement or position along the axes is reversed.

    Steps for Aligning Axes:

    1. Initial Setup:

      • Ensure the microscope and stage are properly connected to the control software (e.g., Imswitch).
    2. Define Origin:

      • Identify the origin (zero point) of the stage coordinate system. (in Hardware this would be defined by the Endstops that are used for homing the axes; The motor will run - if the direction is set correctly - until it hits the switch)
    3. Test Movement:

      • Move the stage to the right and observe the direction of the image on the screen.
        • If the image moves left, apply the "flip x" command.
      • Move the stage upwards and observe the direction of the image on the screen.
        • If the image moves down, apply the "flip y" command.
    4. Adjust Axes:

      • Use the following commands as needed to align the axes:

        # Flip the x-axis if necessary
        if x_movement_incorrect:
        stage.flip_x()

        # Flip the y-axis if necessary
        if y_movement_incorrect:
        stage.flip_y()
    5. Verify Alignment:

      • After applying the flips, verify that the stage movements correspond correctly with the image movements on the screen.
    6. Save Configuration:

      • Save the configuration settings to ensure the alignment persists across sessions.

    Stage Calibration

    Richard Bowman and his team provided a very nice way to calibrate stage coordinates to camera pixel coordinates. We brutally integrated the open-source software which you can find here: https://gitlab.com/openflexure/microscope-extensions/camera-stage-mapping into ImSwitch. If you activate the HistoScan Controller and Widget you can start it either by the GUI or using the HTTP interface by calling http://localhost:8002/HistoScanController/startStageMapping (URL and PORT may differ). The stage will move through a certain series of steps in XY, perform a cross-correlation of the images, compute the shift in XY of the microscope image on the camera, compare it to the expected shift, and return the Image-To-Stage-Displacement Matrix as well as the backlash vector. Both matrices/vectors are microscope specific and will help you match e.g. stage coordinates for stitching software such as ASHLAR or OFM Stitching. This document should give you a rough idea of what's happening.

    Some terminology:

    • Combine X and Y calibrations: The calibration involves combining two separate measurements or calibration runs for the x and y directions, ensuring that the directions are orthogonal (at right angles to each other).

    • 2x2 transformation matrix: The image_to_stage_displacement matrix maps image displacements to stage displacements. This ensures that movements in the image coordinate system are accurately translated to movements in the stage coordinate system.

    • backlash_vector: This is a vector estimating the backlash (mechanical slack or play in the system) in each direction. In this case, the estimated backlash is zero, indicating a precise calibration with no noticeable mechanical play.

    • backlash: The function is expected to return the highest element of backlash_vector as a scalar value, which would be zero in this case.

    Interpretation of the Matrix

    Calibration Matrix image_to_stage_displacement

    The entries of the calibration matrix image_to_stage_displacement can be given specific names and meanings based on their positions within the matrix. Let's denote the matrix as follows:

    \text{image\_to\_stage\_displacement} = \begin{pmatrix} 0 & -1.0 \\ -1.0 & 0 \end{pmatrix}
    \begin{pmatrix} a & b \\ c & d \end{pmatrix}

    Entries and Their Names

    1. a (0):

      • Name: a
      • Meaning: Represents the scaling factor from the x-coordinate in the image to the x-coordinate in the stage. Here, it is 0, indicating no direct mapping from image x to stage x.
    2. b (-1.0):

      • Name: b
      • Meaning: Represents the scaling factor from the y-coordinate in the image to the x-coordinate in the stage. The value -1.0 indicates an inverse and slightly scaled mapping from image y to stage x.
    3. c (-1.0):

      • Name: c
      • Meaning: Represents the scaling factor from the x-coordinate in the image to the y-coordinate in the stage. The value -1.0 indicates an inverse and slightly scaled mapping from image x to stage y.
    4. d (0):

      • Name: d
      • Meaning: Represents the scaling factor from the y-coordinate in the image to the y-coordinate in the stage. Here, it is 0, indicating no direct mapping from image y to stage y.

    Summary of the Matrix Entries

    • a (0): No direct mapping from image x to stage x.
    • b (-1.0): Inverse mapping from image y to stage x.
    • c (-1.0): Inverse mapping from image x to stage y.
    • d (0): No direct mapping from image y to stage y.

    This calibration matrix indicates that there is a transformation involving a 90-degree rotation combined with a negative scaling factor of magnitude close to 1 between the image coordinates and the stage coordinates. The exact interpretation may depend on the specific application, but generally, it implies that movements in one direction in the image are mapped to movements in the perpendicular direction on the stage with a slight scaling adjustment.

    Interpretation of the Values

    The following simulation of the VirtualMicroscope inside ImSwitch (Config: https://github.com/openUC2/ImSwitchConfig/blob/master/imcontrol_setups/example_virtualmicroscope.json):

    The result of the stage mapping is a json file containing (under /ImSwitch/calibFile.json) the following important element:

        "camera_stage_mapping_calibration": {
    "backlash": 0.0,
    "backlash_vector": [
    0.0,
    0.0,
    0.0
    ],
    "image_to_stage_displacement": [
    [
    0.0,
    -1.0
    ],
    [
    -1.0,
    0.0
    ]
    ]
    }

    The provided matrices explain how to transform image coordinates to stage coordinates and how to estimate backlash. Let's break down the interpretation of the entries:

    1. image_to_stage_displacement Matrix

    mData["camera_stage_mapping_calibration"]["image_to_stage_displacement"] =
    array([[ 0. , -1.00135997],
    [-1.00135997, 0. ]])

    This matrix is a 2x2 transformation matrix used to map image coordinates to stage coordinates. Each entry in this matrix has a specific meaning:

    • [0,0] = 0: There is no direct transformation of the x-coordinate in the image to the x-coordinate in the stage.
    • [0,1] = -1.00135997: The y-coordinate in the image inversely affects the x-coordinate in the stage.
    • [1,0] = -1.00135997: The x-coordinate in the image inversely affects the y-coordinate in the stage.
    • [1,1] = 0: There is no direct transformation of the y-coordinate in the image to the y-coordinate in the stage.

    The presence of -1.00135997 off-diagonal elements indicates that the transformation involves a negative and approximately unit scaling between the coordinates, implying a possible 90-degree rotation combined with a scaling factor close to -1.

    2. backlash_vector Matrix

    mData["camera_stage_mapping_calibration"]["backlash_vector"] =
    array([ 0., 0., 0.])

    This vector represents the estimated backlash in each direction (x, y, and possibly z, though z is not utilized in a 2D context). Here, all elements are zero, indicating no measurable backlash in the x and y directions.

    + + \ No newline at end of file diff --git a/docs/Investigator/XYZMicroscope/StageScanning/index.html b/docs/Investigator/XYZMicroscope/StageScanning/index.html index b3d085b48..fea289116 100644 --- a/docs/Investigator/XYZMicroscope/StageScanning/index.html +++ b/docs/Investigator/XYZMicroscope/StageScanning/index.html @@ -10,14 +10,14 @@ - - + +
    -

    Stage Scanning and Image Stitching (ASHLAR)

    We have multiple ways to perform stage scanning and stitching using ImSwitch. Below you can find dedicated documentation for some of them:

    1. Use ImScripting to take snapshots, save them locally and perform stitching using ASHLAR offline
    2. Perform GUI-based stitching and perform stitching using ASHLAR / simple pixel assignments online (e.g. within/after the scanning process)
    3. Stitching using the Chatbot

    1. Using Imswitch for Image Retrieval and Stitching with Interactive ImScripting

    Overview

    This documentation outlines the steps to use ImSwitch and interactive ImScripting to retrieve images and stitch them together. The process involves configuring the motor settings, capturing images in a grid pattern, and stitching these images using ASHLAR.

    The following code can be executed inside the ImSwitch's ImScripting editor.

    # install a modified version of ashlar that enables loading numpy arrays directly without going through a file
    # python -m pip install https://github.com/openUC2/ashlar/archive/refs/heads/master.zip
    import numpy as np
    import time
    import threading
    import os
    import tifffile
    import re
    from ashlar.scripts import ashlar
    from ashlar.scripts.ashlar import process_images
    from pathlib import Path

    # Calculate the image size and the overlap of the images based on pixel size and resolution.
    mPixelSize = 1.0 # micron - use a calibration chart to get this right!
    input_dir = "./mScanImages"
    output_dir = "./mStitchedImage"
    input_name = "TmpTileFile.ome.tif"
    output_name = "ResultingStitchedImage.ome.tif"
    initialPosX = 0
    initialPosY = 0
    maximum_shift_microns = 1000
    Nx = 5
    Ny = 5
    # please try changing these two values to make it match!
    flip_x=True
    flip_y=False

    # create the folders and names
    Path(input_dir).mkdir(parents=True, exist_ok=True)
    Path(output_dir).mkdir(parents=True, exist_ok=True)
    ashlar_output_file = os.path.join(output_dir, output_name)
    ashlar_input_file = os.path.join(input_dir, input_name)

    mFrameShape = api.imcontrol.snapImage(True, False).shape
    xDim = mFrameShape[1] * mPixelSize
    yDim = mFrameShape[0] * mPixelSize
    mOverlap = 0.8 # step is 80% of the frame size, i.e. ~20% overlap at the edges

    # Set the motor control to 0 and define the motor speeds for the X, Y, and Z axes.
    positionerName = api.imcontrol.getPositionerNames()[0]
    api.imcontrol.setPositionerSpeed(positionerName, "X", 20000)
    api.imcontrol.setPositionerSpeed(positionerName, "Y", 20000)
    api.imcontrol.setPositionerSpeed(positionerName, "Z", 2000)

    ## Capture images in an Nx x Ny grid pattern (5x5 here). The stage moves to the start position and captures images at each step. Each image is saved with coordinates as the filename.
    iiter = 0

    USE_OME = False
    if USE_OME:# on MAC ARM M1 it may not work..
    with tifffile.TiffWriter(input_name) as tif: ## Define the input and output directories, and the pixel size. Open a new TIFF file to write the collected tiles, read each image, extract the position from the filename, prepare metadata, and write the image with metadata into the TIFF file. Finally, use ASHLAR to stitch the images together.
    for ix in np.arange(Nx):
    for iy in np.arange(Ny):
    mPos = (ix * xDim * mOverlap + initialPosX, iy * yDim * mOverlap + initialPosY)
    api.imcontrol.movePositioner(positionerName, "XY", mPos, True, True)
    time.sleep(0.5)
    mFrame = api.imcontrol.snapImage(True, False)
    metadata = {
    'Pixels': {'PhysicalSizeX': mPixelSize, 'PhysicalSizeXUnit': 'm', 'PhysicalSizeY': mPixelSize, 'PhysicalSizeYUnit': 'm'},
    'Plane': {'PositionX': ix, 'PositionY': iy}
    }
    tif.write(mFrame, metadata=metadata)
    iiter += 1
    # --flip-x / --flip-y are boolean flags (no value); append them only when enabled
    ashlar_args = ['', ashlar_input_file, '-o', ashlar_output_file, '--pyramid', '-m%s' % maximum_shift_microns]
    if flip_x:
    ashlar_args.append('--flip-x')
    if flip_y:
    ashlar_args.append('--flip-y')
    ashlar.main(ashlar_args)

    else: # this is a workaround with a numpy reader instead
    mImageList = []
    position_list = []
    for ix in range(Nx):
    for iy in range(Ny):
    mPos = (ix * xDim * mOverlap + initialPosX, iy * yDim * mOverlap + initialPosY)
    api.imcontrol.movePositioner(positionerName, "XY", mPos, True, True)
    time.sleep(0.5)
    mFrame = api.imcontrol.snapImage(True, False)
    mImageList.append(mFrame)
    position_list.append(mPos)
    print(mPos)
    arrays = [np.expand_dims(np.array(mImageList),1)] # (num_images, num_channels, height, width)
    # create a 2D list of xy positions
    position_list = np.array(position_list)

    # Process numpy arrays
    process_images(filepaths=arrays,
    output='ashlar_output.tif',
    align_channel=0,
    flip_x=flip_x,
    flip_y=flip_y,
    flip_mosaic_x=False,
    flip_mosaic_y=False,
    output_channels=None,
    maximum_shift=maximum_shift_microns,
    stitch_alpha=0.01,
    maximum_error=None,
    filter_sigma=0,
    filename_format='cycle_{cycle}_channel_{channel}.tif',
    pyramid=False,
    tile_size=1024,
    ffp=None,
    dfp=None,
    barrel_correction=0,
    plates=False,
    quiet=False,
    position_list=position_list,
    pixel_size=mPixelSize)
    mImage = tifffile.imread('ashlar_output.tif')

    #display the resulting tiles
    api.imcontrol.displayImageNapari("Tiles", arrays[0], isRGB=False)

    print(position_list)
    # display the resulting image
    api.imcontrol.displayImageNapari("StitchedImage", mImage, isRGB=False)

    Additional Image Processing

    Additional commands can be used to manipulate the stitched image if needed (depends if the image looks weird or not..):

    Flip X-Axis

    ashlar.main(['', collected_tiles_file, '-o', ashlar_output_file, '--pyramid', '-m%s' % maximum_shift_microns, "--flip-x"])

    Flip Both Axes and Mirror Images in X-Direction

    ashlar.main(['', collected_tiles_file, '-o', ashlar_output_file, '--pyramid', '-m%s' % maximum_shift_microns, "--flip-mosaic-x"])

    In action

    Here we use the loading of numpy images inside ImSwitch and process them with Ashlar to directly display them in Napari. +

    Stage Scanning and Image Stitching (ASHLAR)

    We have multiple ways to perform stage scanning and stitching using ImSwitch. Below you can find dedicated documentation for some of them:

    1. Use ImScripting to take snapshots, save them locally and perform stitching using ASHLAR offline
    2. Perform GUI-based stitching and perform stitching using ASHLAR / simple pixel assignments online (e.g. within/after the scanning process)
    3. Stitching using the Chatbot

    1. Using Imswitch for Image Retrieval and Stitching with Interactive ImScripting

    Overview

    This documentation outlines the steps to use ImSwitch and interactive ImScripting to retrieve images and stitch them together. The process involves configuring the motor settings, capturing images in a grid pattern, and stitching these images using ASHLAR.

    The following code can be executed inside the ImSwitch's ImScripting editor.

    # install a modified version of ashlar that enables loading numpy arrays directly without going through a file
    # python -m pip install https://github.com/openUC2/ashlar/archive/refs/heads/master.zip
    import numpy as np
    import time
    import threading
    import os
    import tifffile
    import re
    from ashlar.scripts import ashlar
    from ashlar.scripts.ashlar import process_images
    from pathlib import Path

    # Calculate the image size and the overlap of the images based on pixel size and resolution.
    mPixelSize = 1.0 # micron - use a calibration chart to get this right!
    input_dir = "./mScanImages"
    output_dir = "./mStitchedImage"
    input_name = "TmpTileFile.ome.tif"
    output_name = "ResultingStitchedImage.ome.tif"
    initialPosX = 0
    initialPosY = 0
    maximum_shift_microns = 1000
    Nx = 5
    Ny = 5
    # please try changing these two values to make it match!
    flip_x=True
    flip_y=False

    # create the folders and names
    Path(input_dir).mkdir(parents=True, exist_ok=True)
    Path(output_dir).mkdir(parents=True, exist_ok=True)
    ashlar_output_file = os.path.join(output_dir, output_name)
    ashlar_input_file = os.path.join(input_dir, input_name)

    mFrameShape = api.imcontrol.snapImage(True, False).shape
    xDim = mFrameShape[1] * mPixelSize
    yDim = mFrameShape[0] * mPixelSize
    mOverlap = 0.8 # step is 80% of the frame size, i.e. ~20% overlap at the edges

    # Set the motor control to 0 and define the motor speeds for the X, Y, and Z axes.
    positionerName = api.imcontrol.getPositionerNames()[0]
    api.imcontrol.setPositionerSpeed(positionerName, "X", 20000)
    api.imcontrol.setPositionerSpeed(positionerName, "Y", 20000)
    api.imcontrol.setPositionerSpeed(positionerName, "Z", 2000)

    ## Capture images in an Nx x Ny grid pattern (5x5 here). The stage moves to the start position and captures images at each step. Each image is saved with coordinates as the filename.
    iiter = 0

    USE_OME = False
    if USE_OME:# on MAC ARM M1 it may not work..
    with tifffile.TiffWriter(input_name) as tif: ## Define the input and output directories, and the pixel size. Open a new TIFF file to write the collected tiles, read each image, extract the position from the filename, prepare metadata, and write the image with metadata into the TIFF file. Finally, use ASHLAR to stitch the images together.
    for ix in np.arange(Nx):
    for iy in np.arange(Ny):
    mPos = (ix * xDim * mOverlap + initialPosX, iy * yDim * mOverlap + initialPosY)
    api.imcontrol.movePositioner(positionerName, "XY", mPos, True, True)
    time.sleep(0.5)
    mFrame = api.imcontrol.snapImage(True, False)
    metadata = {
    'Pixels': {'PhysicalSizeX': mPixelSize, 'PhysicalSizeXUnit': 'm', 'PhysicalSizeY': mPixelSize, 'PhysicalSizeYUnit': 'm'},
    'Plane': {'PositionX': ix, 'PositionY': iy}
    }
    tif.write(mFrame, metadata=metadata)
    iiter += 1
    # --flip-x / --flip-y are boolean flags (no value); append them only when enabled
    ashlar_args = ['', ashlar_input_file, '-o', ashlar_output_file, '--pyramid', '-m%s' % maximum_shift_microns]
    if flip_x:
    ashlar_args.append('--flip-x')
    if flip_y:
    ashlar_args.append('--flip-y')
    ashlar.main(ashlar_args)

    else: # this is a workaround with a numpy reader instead
    mImageList = []
    position_list = []
    for ix in range(Nx):
    for iy in range(Ny):
    mPos = (ix * xDim * mOverlap + initialPosX, iy * yDim * mOverlap + initialPosY)
    api.imcontrol.movePositioner(positionerName, "XY", mPos, True, True)
    time.sleep(0.5)
    mFrame = api.imcontrol.snapImage(True, False)
    mImageList.append(mFrame)
    position_list.append(mPos)
    print(mPos)
    arrays = [np.expand_dims(np.array(mImageList),1)] # (num_images, num_channels, height, width)
    # create a 2D list of xy positions
    position_list = np.array(position_list)

    # Process numpy arrays
    process_images(filepaths=arrays,
    output='ashlar_output.tif',
    align_channel=0,
    flip_x=flip_x,
    flip_y=flip_y,
    flip_mosaic_x=False,
    flip_mosaic_y=False,
    output_channels=None,
    maximum_shift=maximum_shift_microns,
    stitch_alpha=0.01,
    maximum_error=None,
    filter_sigma=0,
    filename_format='cycle_{cycle}_channel_{channel}.tif',
    pyramid=False,
    tile_size=1024,
    ffp=None,
    dfp=None,
    barrel_correction=0,
    plates=False,
    quiet=False,
    position_list=position_list,
    pixel_size=mPixelSize)
    mImage = tifffile.imread('ashlar_output.tif')

    #display the resulting tiles
    api.imcontrol.displayImageNapari("Tiles", arrays[0], isRGB=False)

    print(position_list)
    # display the resulting image
    api.imcontrol.displayImageNapari("StitchedImage", mImage, isRGB=False)

    Additional Image Processing

    Additional commands can be used to manipulate the stitched image if needed (depends if the image looks weird or not..):

    Flip X-Axis

    ashlar.main(['', collected_tiles_file, '-o', ashlar_output_file, '--pyramid', '-m%s' % maximum_shift_microns, "--flip-x"])

    Flip Both Axes and Mirror Images in X-Direction

    ashlar.main(['', collected_tiles_file, '-o', ashlar_output_file, '--pyramid', '-m%s' % maximum_shift_microns, "--flip-mosaic-x"])

    In action

    Here we use the loading of numpy images inside ImSwitch and process them with Ashlar to directly display them in Napari. Make sure the orientation is set as in the animation below according to the Stage Calibration results.

    # keep this number low (e.g. 1-2 to check the correction direction of X/Y)
    Nx = 2
    Ny = 2
    # please try changing these two values to make it match!
    flip_x=True
    flip_y=False

    2. Perform GUI-based stitching and perform stitching using ASHLAR in the Main GUI

    Navigate to the HistoScan Menu and perform the grid-based scanning. Select ASHLAR stitching and the appropriate flipping of the axes (will be suggested by the previously performed stage mapping) and run the scanning. The stitched result will be displayed after some computational time which may vary depending on your CPU and memory availability.

    3. Stitching using the Chatbot

    A recent experimental feature is to use the BioImage.io chatbot and provide a customized extension to interact with the microscope. The extension is implemented in the HyphaController and exposes certain functions to the chatbot interface. This lets us formulate prompts that will then interact with the microscope. Below we formulate a simple query:

    Can you turn on the light of the uc2 microscope to 512 and perform a slide scan with default parameters?

    which gets interpreted on the microscope side

    Tool Call: U2MicroscopeSetIllumination
    Arguments:
    - channel: 0

    - intensity: 512

    Result: Set the illumination!
    Tool Call: U2MicroscopeSlideScan
    Arguments:
    - numberTilesX: 3

    - numberTilesY: 3

    - stepSizeX: 0

    - stepSizeY: 0

    - nTimes: 1

    - tPeriod: 1

    - illuSource: ``

    - initPosX: 0

    - initPosY: 0

    - isStitchAshlar: true

    - isStitchAshlarFlipX: true

    - isStitchAshlarFlipY: false

    Result: Started slide scanning!

    The result is a scan and a following stitching routine using ASHLAR:

    - - + + \ No newline at end of file diff --git a/docs/Investigator/ZMicroscope/UpackZMicroscope/index.html b/docs/Investigator/ZMicroscope/UpackZMicroscope/index.html index 0a693f2a8..0505d50ca 100644 --- a/docs/Investigator/ZMicroscope/UpackZMicroscope/index.html +++ b/docs/Investigator/ZMicroscope/UpackZMicroscope/index.html @@ -10,13 +10,13 @@ - - + +

    Unpack the openUC2 Z-Microscope

    Unpacking the microscope

    The hardcover plastic case contains all you need for the microscope:

    • USB micro cable
    • USB3 camera cable
    • 12V power-supply
    • Sweet treat
    • The actual microscope
    • The objective lens

    The actual Box looks like this:

    Please also find the treat and make sure you provide yourself with enough sugar throughout this unpacking routine :-)

    The foam holds the microscope in place (the actual colour may differ from what you may see):

    The cables are hidden behind the foam:

    Check if you find the content of the box:

    Getting started

    First of all we need to wire up the microscope. For this we will start with the 12V power supply. Unfortunately the powersocket is inside the case, hence you have to first eat some candy in order to better find the spot ;-)

    The same holds true for the USB connection to the microcontroller board. You need to hook it up like that:

    Once done, we continue with inserting the objective lens. Eventually the lens is already inserted and you just need to check if the lens is centered correctly

    The microscope should look like this:

    Wire up the microscope to your computer

    In order to get the microscope working, we first need to install additional drivers. For the Daheng Camera, this would be:

    For additional information and an in-depth explanation for the UC2e system, please have a look here

    Troubleshoot

    We learn from mistakes. So lets start learning. The system is fully open, meaning, you can adjust and change the vast majority of the parts on your own. The entire system consists of the openUC2 frame / skeleton and the 3D printed housing to shield it from dust and light. By removing all M3 cylindrical screws, you can detach the housing from the inner structure to eventually repair or alter the system.

    A 2.5 mm hex key will help you finish this job:

    Lift the lid and the microscope will follow (make sure all cables are detached):

    Now you can start working on the "inner bits":

    In Action

    Here you see the extended focussing of the objective lens:

    Connecting the microscope to the browser and controlling it

    We encourage you to use the UC2ified ImSwitch software to control the microscope. You can find it in this repository: https://github.com/openUC2/ImSwitch/

    However, if you want to quick-start the microscope and see if it works, you can open your browser and use the WEB-Serial interface to interact with the microscope.

    Go to https://youseetoo.github.io/ and connect to your board (most right option saying ESP32 DEV-based UC2 standalone board V2). Select the COM Port which is holding the ESP32 and hit the LOG option, once the dialog opens. The alternative option will help you updating the firmware on the device. An in-depth explanation on how the firmware works can be found here.

    In general, you need to send JSON strings in order to control the system. The strings relevant for the Z-microscope are:

    Home the Z-axis

    It's important to always home the Motors in order to avoid them from getting stuck in an end position (ATTENTION!). The following string will move the motor until the endstop is hit. Afterwards it will release the switch:

    {"task":"/home_act", "home": {"steppers": [{"stepperid":3, "timeout": 2000, "speed": 15000, "direction":1, "endposrelease":3000}]}}

    Afterwards the internal position is set to 0. You can check that by entering:

    {"task": "/motor_get"}

    Move the Z-axis:

    The motor (Nema12) with 200 steps/revolution runs with 16 microsteps and offers a leadscrew with 1mm/revolution. Hence, one microstep corresponds to 312.5 nm (1 mm / (200 × 16)). Running the motor can be issued with the following command:

    {"task":"/motor_act",
    "motor":
    {
    "steppers": [
    { "stepperid": 3, "position": 1000, "speed": 15000, "isabs": 3, "isaccel":0}
    ]
    }
    }
    • stepperid: 3 corresponds to the Z-axis
    • position: steps to go (not physical units!)
    • speed: steps / minute (do not exceed 20000)
    • isabs: absolute or relative motion
    • isaccel: for now, use only non-accelerated motion!

    Safety

    • in case of shattered glass, make sure you don't cut yourself
    • Make sure you don't hurt yourself
    • The moving parts can potentially hurt your finger
    • The electronics - if used in a wrong way - can harm you
    • edges may be sharp, make sure you don't cut yourself
    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/INVESTIGATOR/ProductionXYZMicroscope/index.html b/docs/PRODUCTION/INVESTIGATOR/ProductionXYZMicroscope/index.html index 2b232ce9a..721f1f0d0 100644 --- a/docs/PRODUCTION/INVESTIGATOR/ProductionXYZMicroscope/index.html +++ b/docs/PRODUCTION/INVESTIGATOR/ProductionXYZMicroscope/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -27,7 +27,7 @@

    Prepare the Z-stage

    The documentation for the motorized 25mm Z-stage can be found here: https://openuc2.github.io/docs/PRODUCTION/PG_12_STAGE_Z_NEMA

    Once done, lock the Stage with the Puzzle piece with M5x8 worm screws:

    Prepare Electronics

    Bill of Material

    • UC2e v2 electronics
    • 3x A4988 Stepper driver
    • 12V power supply
    • USB micro cable
    • 3D printed case
    • 2x puzzle pieces
    • 8 M5x8 thread-only screws
    • 4x M3x8mm screws

    Assembly

    Attach the electronics board to the 3D printed assembly and tighten it with the M3 screws (cylindrical, Din906) Attach the puzzle pieces to the distal ends of the assembly and lock it with the M5 screws. For this the yet closed holes have to be opened by "drilling" it through.

    Prepare Triangle Structure

    Tubelens

    Bill of Material

    • Berrybase 100mm CCTV Lens
    • Daheng Vision IMX226 sensor
    • USB 3 Camera Cable
    • 2x Puzzlepieze
    • 8x M5x8 mm worm screw
    • 4x M3x18mm screw

    Assembly

    Adding the Baseplate

    Endstops and Illumination

    Skeleton

    Fully Assembled

    Fluo Extension

    Improvements

    Stage does not run smoothly

    You can release the pressure on the linear bearings by loosening the screws carefully. Make sure you don't introduce unnecessary play. The stage works with two v-grooves and balls in between.

    Additional images (have to be sorted)

    Safety

    TODO: Add additional information!

    • in case of shattered glass, make sure you don't cut yourself
    • Make sure you don't hurt yourself
    • The moving parts can potentially hurt your finger
    • The electronics - if used in a wrong way - can harm you
    • edges may be sharp, make sure you don't cut yourself
    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/APERTURES/index.html b/docs/PRODUCTION/Modules/APERTURES/index.html index 3735f4f10..877ed0768 100644 --- a/docs/PRODUCTION/Modules/APERTURES/index.html +++ b/docs/PRODUCTION/Modules/APERTURES/index.html @@ -10,13 +10,13 @@ - - + +

    Apertures

    This page describes how to assemble the kinematic XY mount for moving a laser/ pinhole.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl
    #01-012BaseSpring LocksPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011Spring0,20€NONE
    #01-011Threaded Inserts M30,20€NONE
    #01-011Screw Ball head0,20€NONE

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/BEAMSPLITTER/index.html b/docs/PRODUCTION/Modules/BEAMSPLITTER/index.html index d1dad0606..c04dbe4ba 100644 --- a/docs/PRODUCTION/Modules/BEAMSPLITTER/index.html +++ b/docs/PRODUCTION/Modules/BEAMSPLITTER/index.html @@ -10,13 +10,13 @@ - - + +

    KINEMATIC MIRROR (90°)

    This page describes how to assemble the partially transparent mirror (45°) module. It uses a front-surface 50% (partially transparent) mirror.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-01150% MirrorAstromedia 40x30mm Partially Transparent Mirror8,00€Astromedia

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    caution

    Insert the beam splitter in such a way that the mirroring/coated surface points away from the printed part!

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/Camera/index.html b/docs/PRODUCTION/Modules/Camera/index.html index 46fdf48cd..e1f121d44 100644 --- a/docs/PRODUCTION/Modules/Camera/index.html +++ b/docs/PRODUCTION/Modules/Camera/index.html @@ -10,15 +10,15 @@ - - + +

    USB Camera

    Duration: 1

    This page describes how to assemble the camera module. It has two different variations:

    • IMX214 (Arducam, LINK)
    • IMX179 (Waveshare, LINK)

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    TypeDetailsLink
    Camera holderholds IMX179/214Part.stl

    Additional parts

    This is used in the current version of the setup

    TypeDetailsPriceLink
    USB CameraArducam USB IMX21440 €Amazon
    or---------
    USB CameraArducam USB IMX21440 €Amazon
    Mounting screwsDIN912 M3x12mm Screws0.40 €Würth

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    IMX219 (Arducam)

    STEP 1

    All parts you need to assemble the module:

    Remove the lens (M12/cellphone) from the camera board.

    STEP 2

    Use the DIN912 M3x12mm screws to mount the camera securely. ATTENTION: Use the decentered holes such that the Camera is placed in the inserts's center!

    STEP 3

    Add the insert to the cube, close it and store the cable safely.

    IMX179 (Waveshare)

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    Remove the lens (M12) from the camera board using a cross key.

    STEP 3

    Use the DIN912 M3x12mm screws to mount the camera securely. ATTENTION: Use the centered screws to have the camera's lens in the center of the module!

    STEP 4

    To remove the lens take a cloth/tissue and pliers and knock it off with some force. Don't break the part!

    STEP 5

    Add the part to the cube and you're done.

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/Eyepiece/index.html b/docs/PRODUCTION/Modules/Eyepiece/index.html index 146d1cfd5..c7d652a0f 100644 --- a/docs/PRODUCTION/Modules/Eyepiece/index.html +++ b/docs/PRODUCTION/Modules/Eyepiece/index.html @@ -10,13 +10,13 @@ - - + +

    Eyepiece

    Duration: 1

    This page describes how to assemble the Eyepiece module. It uses a standard eyepiece with 22mm diameter.

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    TypeDetailsLink
    Holderholds EyepiecePart.stl

    Additional parts

    This is used in the current version of the setup

    TypeDetailsPriceLink
    Eyepiece10x, 18mm Eyepiece10 €Aliexpress

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    Remove XX

    STEP 2

    Use Hotglue to permanently mount the eyepiece to the insert.

    STEP 3

    Add the insert to the cube, close it and store it safely.

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/KIN_MIR_45/index.html b/docs/PRODUCTION/Modules/KIN_MIR_45/index.html index 21502662d..e1b845515 100644 --- a/docs/PRODUCTION/Modules/KIN_MIR_45/index.html +++ b/docs/PRODUCTION/Modules/KIN_MIR_45/index.html @@ -10,13 +10,13 @@ - - + +

    KINEMATIC MIRROR (45°)

    This page describes how to assemble the kinematic mirror (45°) module. It uses a front-surface mirror that can be tuned in 3 axes

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds screwsPart.stl
    #01-021Mirror Mountholds mirror and is movablePart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011FS MirrorAstromedia 40x30mm Frontsurface Mirror5,00€Astromedia
    #01-024Screw (orings)M3x12, DIN9125,00€Astromedia
    #01-033Screw (Pushing)M3x20, NOT DECIDED!15,00€Link
    #01-042O-Ringr=8mm0,10€Link
    #01-051Cube5,00€Link

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    STEP 4

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/KIN_MIR_90/index.html b/docs/PRODUCTION/Modules/KIN_MIR_90/index.html index ff3c5d23f..940c6a4ed 100644 --- a/docs/PRODUCTION/Modules/KIN_MIR_90/index.html +++ b/docs/PRODUCTION/Modules/KIN_MIR_90/index.html @@ -10,13 +10,13 @@ - - + +

    KINEMATIC MIRROR (90°)

    This page describes how to assemble the kinematic mirror (90°) module. It uses a front-surface mirror that can be tuned in 3 axes

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds screwsPart.stl
    #01-021Mirror Mountholds mirror and is movablePart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011FS MirrorAstromedia 40x30mm Frontsurface Mirror5,00€Astromedia
    #01-024Screw (orings)M3x12, DIN9125,00€Astromedia
    #01-033Screw (Pushing)M3x20, NOT DECIDED!15,00€Link
    #01-042O-Ringr=8mm0,10€Link
    #01-051Cube5,00€Link

    Assembly

    Duration: 1

    Below we describe how the device can be build and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    STEP 4

    STEP 5

    STEP 6

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/KIN_XY_LASER/index.html b/docs/PRODUCTION/Modules/KIN_XY_LASER/index.html index 0ac45f686..bddb63b0d 100644 --- a/docs/PRODUCTION/Modules/KIN_XY_LASER/index.html +++ b/docs/PRODUCTION/Modules/KIN_XY_LASER/index.html @@ -10,13 +10,13 @@ - - + +

    Kinematic XY Mount

    This page describes how to assemble the kinematic XY mount for moving a laser/ pinhole.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl
    #01-012BaseSpring LocksPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011Spring0,20€NONE
    #01-011Threaded Inserts M30,20€NONE
    #01-011Screw Ball head0,20€NONE

    Assembly

    Duration: 1

    Below we describe how the device can be built and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    STEP 4

    STEP 6

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/LENS/index.html b/docs/PRODUCTION/Modules/LENS/index.html index 966eae26c..eb616d4ac 100644 --- a/docs/PRODUCTION/Modules/LENS/index.html +++ b/docs/PRODUCTION/Modules/LENS/index.html @@ -10,14 +10,14 @@ - - + +

    LENS

    Duration: 1

    This page describes how to assemble the Lens module. It uses a 40mm biconvex/biconcave glass lens with varying diameter.

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDTypeDetailsLink
    #01-01Holderholds LensPart.stl
    #01-02Lidfixes lens using screw mechanismPart.stl

    Additional parts

    This is used in the current version of the setup

    IDTypeDetailsPriceLink
    #01-03Lens (50mm)f'=50mm, Diameter 40mm2 €Aliexpress
    or
    #01-04Lens (100mm)f'=100mm, Diameter 40mm2 €Aliexpress
    or
    #01-05Lens (-50mm)f'=-50mm, Diameter 40mm2 €Aliexpress

    Assembly

    Duration: 1

    Below we describe how the device can be built and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    Thread in the round piece and fix the lens. Make sure you use the right ring for the right lens!

    caution

    Clean the lenses with a cotton cloth.

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/MIR_45/index.html b/docs/PRODUCTION/Modules/MIR_45/index.html index 5d4559459..542435c50 100644 --- a/docs/PRODUCTION/Modules/MIR_45/index.html +++ b/docs/PRODUCTION/Modules/MIR_45/index.html @@ -10,13 +10,13 @@ - - + +

    Mirror (45°)

    This page describes how to assemble the standard fold mirror (45°) module. It uses a Frontsurface mirror.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011MirrorAstromedia 40x30mm Partially Transparent Mirror8,00€Astromedia

    Assembly

    Duration: 1

    Below we describe how the device can be built and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/POLARIZER_ROTATING/index.html b/docs/PRODUCTION/Modules/POLARIZER_ROTATING/index.html index ab4684c25..8aa3dadf7 100644 --- a/docs/PRODUCTION/Modules/POLARIZER_ROTATING/index.html +++ b/docs/PRODUCTION/Modules/POLARIZER_ROTATING/index.html @@ -10,13 +10,13 @@ - - + +

    Polarization Rotator

    This page describes how to assemble the kinematic XY mount for moving a laser/ pinhole.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl
    #01-012BaseSpring LocksPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011Spring0,20€NONE
    #01-011Threaded Inserts M30,20€NONE
    #01-011Screw Ball head0,20€NONE

    Assembly

    Duration: 1

    Below we describe how the device can be built and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/SAMPLE_HOLDEr/index.html b/docs/PRODUCTION/Modules/SAMPLE_HOLDEr/index.html index d810fae4d..4225d44d8 100644 --- a/docs/PRODUCTION/Modules/SAMPLE_HOLDEr/index.html +++ b/docs/PRODUCTION/Modules/SAMPLE_HOLDEr/index.html @@ -10,13 +10,13 @@ - - + +

    Sample Holder

    This page describes how to assemble the kinematic XY mount for moving a laser/ pinhole.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl
    #01-012BaseSpring LocksPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011Spring0,20€NONE
    #01-011Threaded Inserts M30,20€NONE
    #01-011Screw Ball head0,20€NONE

    Assembly

    Duration: 1

    Below we describe how the device can be built and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    :::alert

    this has been updated with magnets! Make sure you label the magnets prior to insertion so that polarity is correct.

    :::

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/STAGE_Z_MANUAL/index.html b/docs/PRODUCTION/Modules/STAGE_Z_MANUAL/index.html index b57b00267..67d30a351 100644 --- a/docs/PRODUCTION/Modules/STAGE_Z_MANUAL/index.html +++ b/docs/PRODUCTION/Modules/STAGE_Z_MANUAL/index.html @@ -10,13 +10,13 @@ - - + +

    Kinematic XY Mount / Laser

    This page describes how to assemble the kinematic XY mount for moving a laser/ pinhole.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl
    #01-012BaseSpring LocksPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011Spring0,20€NONE
    #01-011Threaded Inserts M30,20€NONE
    #01-011Screw Ball head0,20€NONE

    Assembly

    Duration: 1

    Below we describe how the device can be built and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    STEP 4

    STEP 5

    STEP 6

    STEP 7

    STEP 8

    STEP 9

    STEP 10

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/STAGE_Z_NEMA/index.html b/docs/PRODUCTION/Modules/STAGE_Z_NEMA/index.html index b0f4eb5bc..88b74918b 100644 --- a/docs/PRODUCTION/Modules/STAGE_Z_NEMA/index.html +++ b/docs/PRODUCTION/Modules/STAGE_Z_NEMA/index.html @@ -10,13 +10,13 @@ - - + +

    Z-Stage Motorized NEMA12 25mm

    This page describes how to assemble the kinematic XY mount for moving a laser/ pinhole.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl
    #01-012BaseSpring LocksPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011Spring0,20€NONE
    #01-011Threaded Inserts M30,20€NONE
    #01-011Screw Ball head0,20€NONE

    Assembly

    Duration: 1

    Below we describe how the device can be built and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    STEP 3

    STEP 4

    STEP 5

    STEP 6

    STEP 7

    STEP 8

    STEP 9

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/PRODUCTION/Modules/TORCH/index.html b/docs/PRODUCTION/Modules/TORCH/index.html index 2daed27dc..bfb8962c9 100644 --- a/docs/PRODUCTION/Modules/TORCH/index.html +++ b/docs/PRODUCTION/Modules/TORCH/index.html @@ -10,13 +10,13 @@ - - + +

    Torch

    This page describes how to assemble the kinematic XY mount for moving a laser/ pinhole.

    Duration: 1

    Bill of material

    Below you will find all components necessary to build this device.

    3D printing files

    All these files need to be printed.

    Printing parameters:

    InfillLayerheightSpecial ProfileMaterial
    ~20%0.25/0.3mmmini/i3PLA (black)
    IDAmountTypeDetailsLink
    #01-011BaseHolds MirrorPart.stl
    #01-012BaseSpring LocksPart.stl

    Additional parts

    This is used in the current version of the setup

    IDAmountTypeDetailsPriceLink
    #01-011Spring0,20€NONE
    #01-011Threaded Inserts M30,20€NONE
    #01-011Screw Ball head0,20€NONE

    Assembly

    Duration: 1

    Below we describe how the device can be built and assembled in order to replicate the whole system.

    STEP 1

    All parts you need to assemble the module:

    STEP 2

    Design files

    The original design files are in the INVENTOR folder.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCN/index.html b/docs/Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCN/index.html index b66e41d1c..67091fa53 100644 --- a/docs/Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCN/index.html +++ b/docs/Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCN/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -23,7 +23,7 @@ 显微镜物镜: 一种特殊的透镜系统,可以放大物体

    接下来将通过一个视频介绍一个MiniBox(2022年10月的版本)里面有什么:

    什么是UC2?

    UC2项目的核心元素是一个简单的立方体。 这个立方体由两半部分组成,通常内有一个可以滑动的插件。 插件可以容纳各种光学元件(例如透镜、反光镜),这意味着每个立方体可以通过安装不同的插件来实现不同的功能。

    立方体类型1:带插头连接注塑成型件

    底板

    底板

    立方体可以安装在底板上。底板模块可以像拼图一样组合在一起。

    自行打印UC2

    UC2立方体也可以3D打印。它看起来与注塑模型相同,但这里它由立方体盖和立方体体组成,用螺丝固定在一起。螺丝非常适合放在磁性板上。通过结合不同的立方体模块,可以轻松组装不同的光学结构。每个骰子可以增加一个新功能。你的创造力没有限制。

    立方体类型2:带磁性连接的3D打印件

    立方体

    带磁铁的底板

    在3D打印的底板中有小的球形磁铁,立方体就放在这些磁铁上。

    想要更多的立方体?那你可以自行3D打印。你可以在这里找到所有信息

    这就是立方体如何组合在一起

    持续时间:1分钟

    确保立方体正确放置在板上,并且没有倾斜。最后重要的是插件放置在正确的位置。

    如果你没有看到清晰的图像,移动插件(例如透镜),直到你看到它很清楚。图片中的绿色箭头显示了如何做。

    这里你可以找到一个小视频,解释了立方体的核心概念

    文档内容:

    符号是什么意思?

    实验 如果你看到这个方块,说明有实验可做!你可以在这个方块上放置一个UC2立方体。
    解释:如果你看到这个图标,说明有东西可以学习!
    账单:这里有东西需要计算。拿起笔和纸开始解谜。
    注意:不要用手指触摸玻璃表面!
    清洁镜头:如果你已经触摸了镜头,你可以用眼镜布来清洁它。

    透镜能做什么?

    持续时间:2分钟

    拿一个或多个内有透镜的立方体,看着这里展示的UC2符号。手持立方体,改变透镜和图像之间的距离。

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreLens/index.html b/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreLens/index.html index d59fe6b4f..430a127fa 100644 --- a/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreLens/index.html +++ b/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreLens/index.html @@ -10,15 +10,15 @@ - - + +

    Lens

    Lenses

    In ray optics, light is represented as a bundle of rays (arrows), which simplifies the physical properties of light. A ray has a direction and is therefore drawn with an arrow. A lens "refracts" the beam, changing its direction.

    The focal length of a lens corresponds to the distance from the lens to the focal plane on which the focal point lies. It is given in millimeters (f = mm).

    Converging (positive) and diverging (negative) lenses

    Converging lenses refract the rays of light traveling parallel to the optical axis at a point called the focal point.

    The diverging lenses refract the rays of light traveling parallel to the optical axis as if they originated from a point called the "virtual" focus.

    Lenses “refract” the rays of light

    You can find the focal length of the lens as a printed number on the lens holder. The MiniBOX receives a 100mm converging lens, two 40mm converging lenses and a -50mm negative lens. The numbers indicate the focal length.

    The converging lens is also called a positive or convex lens. The middle part of the lens is always thicker than the edge.

    The converging lens enlarges the image. The magnification is different for the 40mm lens and the 100mm lens. The image can be upright or inverted.

    The negative lens (spreading lens) is sometimes also called a negative or concave lens. The middle part of the lens is always thinner than the edge.

    With the negative lens (here: -50 mm lens) the image is always reduced and always upright

    We assume that our lenses are so-called "thin lenses". This means we can consider them as one plane and not care about their thickness. This makes explanations and calculations much easier.

    Did the answers raise any more questions? Then read on to find out exactly how lenses work...

    Lens image

    Now take the lens cubes. With the right lens, try to decipher the focal length information in the cubes shown. Move the lens over the writing until it is the same size as the "UC2" text.

    Can you see the text the same size and orientation as the "UC2"? What happens when you change the distance between the lens and the image?

    What happens if you use a lens with the wrong focal length?

    Image of an object through a positive lens

    Let's take the converging lens as an example. We start with an object (green arrow) and see what happens to the rays that start from the top. There are infinitely many rays in all directions, but for drawing the figure the following three rays will suffice:

    1. The centre beam (orange) passes undisturbed through the center of the lens.
    2. The focus ray (yellow) also starts from the tip of the arrow, but goes through the object-side focus at focal length f. After the lens, it continues at the same height, but now parallel to the optical axis.
    3. The parallel beam (red) initially runs parallel to the optical axis, but is then refracted at the lens in such a way that it passes through the focal point on the image side at focal length f.

    The image is formed where all the rays intersect. The principle is used for all points or the rays of an object emanating from them. Depending on which lens is used and depending on the position of the object, the properties of the image change, such as size, orientation and position.

    Image of an object through a negative lens

    In the case of the negative lens, we use the same method to image the ray path. Unlike the case of the converging lens, the image is always reduced and virtual. Magnification depends on the position of the object in front of the lens. Unlike the converging lens, the image is created on the object side and is therefore called a virtual image. You can see it directly with your eyes but not project it onto a screen.

    The way a lens creates an image is predictable by knowing the focal length of that lens. Therefore, a certain distance must be maintained so that you can see the writing with the specified lens on the previous sheet.

    The magnification and the location where the image is formed depend on the focal length of the lens and the distance between the lens and the object.

    With the diverging lens (f = -50 mm) you always see a reduced virtual image. A virtual image can only be viewed with the eye. So far we only have virtual ones seen pictures.

    The converging lens as a magnifying glass

    Take the UC2 lens cube with focal length f=40mm and use it as a magnifying glass.

    Can you read the small letters through the converging lens? What is written there?

    A lens in action can be found here:

    That's what converging lenses do

    With the converging lenses, the image and the magnification depend on the position of the object.

    If the distance between the object and the lens is more than twice the focal length of the lens, then the image is...

    • Vice versa
    • Swapped sides
    • Reduced
    • Real

    If the distance between the object and the lens is exactly twice the focal length of the lens, then the image is...

    • Vice versa
    • Swapped sides
    • Same size
    • Real

    If the distance between the object and the lens is more than the focal length and less than twice the focal length of the lens, then the image is...

    • Vice versa
    • Swapped sides
    • Magnified
    • real

    Object distance (g)

    The distance between the object and the lens plane is called g.

    Image width (b)

    The distance between the lens plane and the image formed by the lens is denoted as b.

    The converging lens can produce a real image. The real image can then be seen on a screen.

    That's why the magnifying glass enlarges

    Magnifying glass effect!

    If the distance between the object and the lens is less than the focal length of the lens, then the image is...

    • upright
    • right side up
    • Magnified
    • Virtual

    The magnifying glass is the simplest of all optical devices, since it consists only of a simple converging lens with a suitable focal length. Why does the cube with the 50 mm enlarge the small text? If the object is in front of the focal length of the lens - i.e. less than 50 mm in front of the lens - the lens creates a virtual image which is behind the actual object. The eye perceives it enlarged. Check out the diagram above.

    Calculate the magnification of the magnifying glass using the following formula:

    250 mm is the distance of clear visual range - i.e. the distance between the object and the eye at which most people can read well. More on this later in the “accommodation” of the eye.

    How does a cinema projector work?

    Take the UC2 lens cube with focal length f = 40 mm and place it behind the sample holder cube. The distance between the object and the lens (i.e. the object distance g) should be approx. 50 mm. If you now illuminate the object with the flashlight, you will see it sharply at a distance of approx. 200 mm on the wall. A cinema projector has a film strip instead of the object and of course a much stronger light source.

    Use a flashlight (e.g. from your cell phone) as a light source and hold it in front of the object

    Use the image or text on the microscope slide as the object

    How is the image oriented? Slide the lens back and forth in the cube and see when the image is in focus. Find the image for g = 50mm, 60mm, 65mm and measure the distance between the lens and the image.

    How does a cinema projector work?

    Where is the picture?

    When an object is imaged through a converging lens, the position and size of the image depend on the distance (g) of the object to the lens and its focal length (f). The lens equation describes the relationship between image distance (b) and object distance (g):

    How big is the picture?

    The magnification of the object on the screen can easily be calculated using the following formula:

    How the projector works

    Check if your observation agrees with the calculation

    Calculate the magnification of the projector for the different values of g and b.

    Our lens has a focal length of f= 40 mm.

    For g = 50mm → b = 200mm

    For g = 60 mm → b = 120 mm

    For g = 65 mm → b = 104 mm


    The projector always produces an enlarged, inverted (reversed) image. The position of the image and its magnification depend on the position and size of the object.

    Tutorial: Determining the Focal Distance of a Positive Lens

    Materials needed:

    • Light source (e.g., the room's illumination)
    • Positive lens
    • Screen (e.g. table, piece of paper, etc.)

    Instructions:

    1. Position the positive lens so that it faces the light source. Align a screen parallel to the focal plane of the lens.
    2. Modify the distance between the lens and the screen.
    3. Carefully observe and record the position at which the light source forms a clear image on the surface of the screen.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreTelescope/index.html b/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreTelescope/index.html index dc317e4ea..db75d6746 100644 --- a/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreTelescope/index.html +++ b/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreTelescope/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -28,7 +28,7 @@ Search for an object to the distance and use Galileo's telescope to look at it.

    What is a Kepler telescope?

    Set the lenses in the correct positions as shown in the diagram. Then look through the telescope into the distance.

    What does the picture look like? How is the image oriented?

    As you look through the telescope, vary the distances between the components to see such a sharp image!

    This is a Kepler telescope

    This type of telescope is often used in astronomy.

    This is how the Kepler telescope works

    What is the magnification of this Kepler telescope?

    Formula for calculating magnification

    This telescope can achieve a higher magnification than the Galilean telescope. But it creates the opposite picture. However, this is not a problem for observing the stars.

    The picture is always
    • Magnified by the magnification from the formula above
    • Vice versa
    • Sides reversed

    The field of view is larger than with the Galileo telescope.


    Tutorial: Kepler's Telescope

    Materials needed:

    • Eight base plates
    • 100 mm positive lens (in cube)
    • 50 mm positive lens (in cube)
    • Two empty cubes

    Diagram (side view):

    Instructions for assembling Kepler's telescope:

    Step 1: Align the cubes

    Align the cubes such that the two lenses lay at the extremes and the two empty cubes in the middle.

    Step 2: Fix the cubes with base plates

    Fix the cubes with the base plates placing them on top and on the bottom.

    Step 3: Adjust the distance

    Adjust the distance between the lenses as shown in the image.

    Step 4: Use Kepler's telescope

    Look for an object to the distance and use Kepler's telescope to look at it.

    What is a spotting scope?

    The spotting scope is long, so the scheme is not the same size. Set the lenses in the correct positions as shown in the diagram and look into the distance through the telescope.

    which results in

    How does the image here compare to the Kepler telescope?

    As you look through the telescope, adjust the distances between the components to see a sharp image!

    This is how the spotting scope works

    The magnification is like that of the Kepler telescope. The erecting lens only changes the orientation (the image is reversed), not the magnification.

    An upright image is necessary for terrestrial observations. True terrestrial telescopes use prism systems to rotate the image and keep it compact.

    The picture is
    • Magnified at the same magnification as the Kepler telescope
    • Upright
    • mirrored

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/ENGLISH/coreMicroscope/index.html b/docs/Toolboxes/DiscoveryCore/ENGLISH/coreMicroscope/index.html index 60df704fd..062d76d1b 100644 --- a/docs/Toolboxes/DiscoveryCore/ENGLISH/coreMicroscope/index.html +++ b/docs/Toolboxes/DiscoveryCore/ENGLISH/coreMicroscope/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -30,7 +30,7 @@

    Materials needed:

    • Microscope objective 4x
    • Microscope objective long mount with gear
    • Ramsden-Eyepiece (in cube)
    • Two non-kinematic mirrors (in cubes)
    • Sample holder (in cube)
    • Three empty cubes
    • 11 base plates
    • Smartphone base plate
    • Torch lamp
    • 50 mm lens (in cube)

    Diagram (Side view):

    Instructions for assembling the Smartphone Microscope:

    Step 1: Build a four-base plate line

    Step 2: Assemble the components

    Place the Microscope objective mount on one extreme followed by the two mirrors facing each other and one empty cube in the other extreme. Fix them with base plates.

    Step 3: Adjust the objective

    Build one cube with the microscope objective inside. Adjust the objective's height if necessary by using the gear.

    Step 4: Place the eyepiece

    Place the eyepiece next to the microscope objective and one empty cube next to it. Mind the right orientation of the eyepiece.

    Step 5: Align the smartphone base

    Place the smartphone base with the hole aligned with the eyepiece. Note: You can adjust the orientation of the smartphone base to adapt your smartphone's size.

    Step 6: Set up the sample holder

    Place the sample holder cube on top of the microscope objective. Mind the distance between them. You can adjust the coarse distance by sliding the sample holder inside the cube and the finer distance by using the gear.

    Step 7: Add the converging lens and lamp

    Place a converging lens cube on top of the sample holder cube and place the torch lamp on top. Place the smartphone aligned to the eyepiece.

    Step 8: Adjust for clarity

    Try to move the smartphone such that the whole eyepiece circle appears illuminated. Then, turn the gear to focus and get a sharp image of the specimen.

    Better with smartphone or eye?

    The smartphone camera has a lens with a very short focal length because it has to fit into the thin smartphone. The lens then creates an image on the camera sensor whose properties are similar to those of the human eye.

    The eye can see objects from both a distance and near. This property is called accommodation.

    The smartphone camera can also do this, but it is called autofocus. It describes the ability to sharply image objects at different distances on the sensor.

    The image from the eyepiece comes in parallel rays, as if coming from infinity. You observed with a relaxed eye (looking into the distance) or with a camera focused at infinity.


    Calculation results

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxEN/index.html b/docs/Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxEN/index.html index 893af2458..336e19fe6 100644 --- a/docs/Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxEN/index.html +++ b/docs/Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxEN/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -24,7 +24,7 @@ docs/01_Toolboxes/01_DiscoveryCore/IMAGES/MINIBOX/2.png

    If you don't see a sharp image, move the inserts (e.g. lens) until you see it clearly. The green arrow in the picture shows you how to do this.

    Here you can find a small video that explains the core concept of the cube

    What do the symbols mean?

    Duration: 2

    Experiment If you see this block, there is something to experiment with! You can place a UC2 cube on this block.
    Explanations: If you see this icon, there's something to learn!
    Invoices: There is something to calculate here. Take a pen and paper and start puzzles.
    Caution: Do not touch the glass surfaces with your fingers!
    Cleaning the lenses: If you have already touched the lens, you can clean it with a glasses cloth.

    What can a lens do?

    Duration: 2

    Take one or more of the cubes that have a lens in them and look at the UC2 symbol shown here. Hold the cube in your hand and change the distance between the lens and the image.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/FRENCH/CoreLensFR/index.html b/docs/Toolboxes/DiscoveryCore/FRENCH/CoreLensFR/index.html index 62e8d0790..0abc680ff 100644 --- a/docs/Toolboxes/DiscoveryCore/FRENCH/CoreLensFR/index.html +++ b/docs/Toolboxes/DiscoveryCore/FRENCH/CoreLensFR/index.html @@ -10,13 +10,13 @@ - - + +

    Lentille

    Lentilles

    En optique géométrique, la lumière est représentée comme un faisceau de rayons (flèches), ce qui simplifie les propriétés physiques de la lumière. Un rayon a une direction et est donc dessiné avec une flèche. Une lentille "réfracte" le faisceau, changeant sa direction.

    La distance focale d'une lentille correspond à la distance entre la lentille et le plan focal sur lequel se trouve le point focal. Elle est donnée en millimètres (f = mm).

    Lentilles convergentes (positives) et divergentes (négatives)

    Les lentilles convergentes réfractent les rayons lumineux se déplaçant parallèlement à l'axe optique en un point appelé le point focal.

    Les lentilles divergentes réfractent les rayons lumineux se déplaçant parallèlement à l'axe optique comme s'ils provenaient d'un point appelé le foyer "virtuel".

    Les lentilles “réfractent” les rayons lumineux

    Vous pouvez trouver la distance focale de la lentille comme un numéro imprimé sur le support de lentille. La MiniBOX contient une lentille convergente de 100 mm, deux lentilles convergentes de 40 mm et une lentille négative de -50 mm. Les numéros indiquent la distance focale.

    La lentille convergente est également appelée lentille positive ou convexe. La partie centrale de la lentille est toujours plus épaisse que le bord.

    La lentille convergente agrandit l'image. Le grossissement est différent pour la lentille de 40 mm et celle de 100 mm. L'image peut être droite ou inversée.

    La lentille négative (lentille divergente) est parfois aussi appelée lentille négative ou concave. La partie centrale de la lentille est toujours plus fine que le bord.

    Avec la lentille négative (ici : lentille de -50 mm), l'image est toujours réduite et toujours droite.

    Nous supposons que nos lentilles sont des "lentilles minces". Cela signifie que nous pouvons les considérer comme un plan unique sans nous soucier de leur épaisseur. Cela rend les explications et les calculs beaucoup plus simples.

    Les réponses ont-elles suscité d'autres questions ? Alors découvrez exactement comment fonctionnent les lentilles...

    Image de la lentille

    Prenez maintenant les cubes de lentilles. Avec la bonne lentille, essayez de déchiffrer les informations sur la distance focale dans les cubes montrés. Déplacez la lentille sur l'écriture jusqu'à ce qu'elle soit de la même taille que le texte "UC2".

    Pouvez-vous voir le texte de la même taille et orientation que le "UC2" ? Que se passe-t-il lorsque vous changez la distance entre la lentille et l'image ?

    Que se passe-t-il si vous utilisez une lentille avec la mauvaise distance focale ?

    Image d'un objet à travers une lentille positive

    Prenons la lentille convergente comme exemple. Nous commençons par un objet (flèche verte) et voyons ce qui arrive aux rayons qui partent du sommet. Il y a une infinité de rayons dans toutes les directions, mais pour dessiner la figure, les trois rayons suivants suffiront :

    1. Le rayon central (orange) passe sans être perturbé par le centre de la lentille.
    2. Le rayon focal (jaune) commence également à partir de la pointe de la flèche, mais passe par le foyer côté objet à une distance focale f. Après la lentille, il continue à la même hauteur, mais maintenant parallèlement à l'axe optique.
    3. Le rayon parallèle (rouge) se déplace d'abord parallèlement à l'axe optique, mais est ensuite réfracté à la lentille de manière à passer par le point focal côté image à une distance focale f.

    L'image se forme là où tous les rayons se croisent. Le principe est utilisé pour tous les points ou les rayons d'un objet émanant d'eux. Selon la lentille utilisée et selon la position de l'objet, les propriétés de l'image changent, telles que la taille, l'orientation et la position.

    Image d'un objet à travers une lentille négative

    Dans le cas de la lentille négative, nous utilisons la même méthode pour imager le chemin des rayons. Contrairement au cas de la lentille convergente, l'image est toujours réduite et virtuelle. Le grossissement dépend de la position de l'objet devant la lentille. Contrairement à la lentille convergente, l'image est créée du côté objet et est donc appelée une image virtuelle. Vous pouvez la voir directement avec vos yeux, mais vous ne pouvez pas la projeter sur un écran.

    La façon dont une lentille crée une image est prévisible en connaissant la distance focale de cette lentille. Par conséquent, une certaine distance doit être maintenue afin que vous puissiez voir l'écriture avec la lentille spécifiée sur la feuille précédente.

    Le grossissement et l'emplacement où l'image est formée dépendent de la distance focale de la lentille et de la distance entre la lentille et l'objet.

    Avec la lentille divergente (f = -50 mm), vous voyez toujours une image virtuelle réduite. Une image virtuelle ne peut être vue qu'avec l'œil. Jusqu'à présent, nous n'avons vu que des images virtuelles.

    La lentille convergente comme loupe

    Prenez le cube de lentilles UC2 avec une distance focale f = 40 mm et utilisez-le comme une loupe.

    Pouvez-vous lire les petites lettres à travers la lentille convergente ? Qu'est-ce qui est écrit là ?

    Une lentille en action peut être trouvée ici :

    Ce que font les lentilles convergentes

    Avec les lentilles convergentes, l'image et le grossissement dépendent de la position de l'objet.

    Si la distance entre l'objet et la lentille est supérieure à deux fois la distance focale de la lentille, alors l'image est...

    • Inversée
    • Côté opposé
    • Réduite
    • Réelle

    Si la distance entre l'objet et la lentille est exactement deux fois la distance focale de la lentille, alors l'image est...

    • Inversée
    • Côté opposé
    • Même taille
    • Réelle

    Si la distance entre l'objet et la lentille est supérieure à la distance focale et inférieure à deux fois la distance focale de la lentille, alors l'image est...

    • Inversée
    • Côté opposé
    • Agrandie
    • Réelle

    Distance de l'objet (g)

    La distance entre l'objet et le plan de la lentille est appelée g.

    Largeur de l'image (b)

    La distance entre le plan de la lentille et l'image formée par la lentille est désignée comme b.

    La lentille convergente peut produire une image réelle. L'image réelle peut ensuite être vue sur un écran.

    Pourquoi la loupe agrandit-elle ?

    Effet de loupe !

    Si la distance entre l'objet et la lentille est inférieure à la distance focale de la lentille, alors l'image est...

    • Droite
    • Côté droit
    • Agrandie
    • Virtuelle

    La loupe est le plus simple de tous les dispositifs optiques, car elle consiste uniquement en une simple lentille convergente avec une distance focale appropriée. Pourquoi le cube avec les 50 mm agrandit-il le petit texte ? Si l'objet est devant la distance focale de la lentille - c'est-à-dire à moins de 50 mm devant la lentille - la lentille crée une image virtuelle qui se trouve derrière l'objet réel. L'œil la perçoit agrandie. Consultez le schéma ci-dessus.

    Calculez le grossissement de la loupe en utilisant la formule suivante :

    250 mm est la distance de vision distincte - c'est-à-dire la distance entre l'objet et l'œil à laquelle la plupart des gens peuvent bien lire. Plus d'informations à ce sujet plus tard dans la section “accommodation” de l'œil.

    Comment

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/FRENCH/CoreTelescopeFR/index.html b/docs/Toolboxes/DiscoveryCore/FRENCH/CoreTelescopeFR/index.html index 529beec25..9cfdb41fa 100644 --- a/docs/Toolboxes/DiscoveryCore/FRENCH/CoreTelescopeFR/index.html +++ b/docs/Toolboxes/DiscoveryCore/FRENCH/CoreTelescopeFR/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -27,7 +27,7 @@ Cherchez un objet au loin et utilisez le télescope de Galilée pour le regarder.

    Qu'est-ce qu'un télescope de Kepler ?

    Placez les lentilles dans les positions correctes comme indiqué dans le schéma. Ensuite, regardez au loin à travers le télescope.

    À quoi ressemble l'image ? Comment est l'image orientée ?

    En regardant à travers le télescope, variez les distances entre les composants pour voir une image nette !

    Voici un télescope de Kepler

    Ce type de télescope est souvent utilisé en astronomie.

    Voici comment fonctionne le télescope de Kepler

    Quel est le grossissement de ce télescope de Kepler ?

    Formule pour calculer l'agrandissement

    Ce télescope peut atteindre un grossissement plus élevé que le télescope de Galilée. Mais il crée une image inversée. Cependant, ce n'est pas un problème pour observer les étoiles.

    L'image est toujours
    • Agrandie par le grossissement de la formule ci-dessus
    • Inversée
    • Côtés inversés

    Le champ de vision est plus grand qu'avec le télescope de Galilée.


    Tutoriel : télescope de Kepler

    Matériaux nécessaires :

    • Huit plaques de base
    • Lentille positive de 100 mm (dans un cube)
    • Lentille positive de 50 mm (dans un cube)
    • Deux cubes vides

    Schéma (vue latérale) :

    Instructions pour assembler le télescope de Kepler :

    Étape 1 : Alignez les cubes

    Alignez les cubes de sorte que les deux lentilles soient aux extrémités et les deux cubes vides au milieu.

    Étape 2 : Fixez les cubes avec des plaques de base

    Fixez les cubes avec les plaques de base en les plaçant en haut et en bas.

    Étape 3 : Ajustez la distance

    Ajustez la distance entre les lentilles comme montré dans l'image.

    Étape 4 : Utilisez le télescope de Kepler

    Cherchez un objet au loin et utilisez le télescope de Kepler pour le regarder.

    Qu'est-ce qu'une longue-vue ?

    La longue-vue est longue, donc le schéma n'est pas à la même taille. Placez les lentilles dans les positions correctes comme indiqué dans le schéma et regardez au loin à travers le télescope.

    ce qui donne

    Comment l'image ici se compare-t-elle au télescope de Kepler ?

    En regardant à travers le télescope, ajustez les distances entre les composants pour voir une image nette !

    Voici comment fonctionne la longue-vue

    Le grossissement est comme celui du télescope de Kepler. La lentille redressante ne change que l'orientation (l'image est inversée), pas le grossissement.

    Une image droite est nécessaire pour les observations terrestres. Les véritables télescopes terrestres utilisent des systèmes de prismes pour tourner l'image et la garder compacte.

    L'image est
    • Agrandie au même grossissement que le télescope de Kepler
    • Droite
    • Miroir

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/FRENCH/coreMicroscopeFR/index.html b/docs/Toolboxes/DiscoveryCore/FRENCH/coreMicroscopeFR/index.html index bbdd29872..b937d2182 100644 --- a/docs/Toolboxes/DiscoveryCore/FRENCH/coreMicroscopeFR/index.html +++ b/docs/Toolboxes/DiscoveryCore/FRENCH/coreMicroscopeFR/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -30,7 +30,7 @@

    Matériaux nécessaires :

    • Objectif de microscope 4x
    • Support long d'objectif de microscope avec engrenage
    • Oculaire de Ramsden (dans un cube)
    • Deux miroirs non cinématiques (dans des cubes)
    • Porte-échantillon (dans un cube)
    • Trois cubes vides
    • 11 plaques de base
    • Plaque de base pour smartphone
    • Lampe torche
    • Lentille de 50 mm (dans un cube)

    Schéma (vue latérale) :

    Instructions pour assembler le microscope pour smartphone :

    Étape 1 : Construire une ligne de quatre plaques de base

    ![](../IMAGES/MINIBOXTUTORIAL/image49.jpg)

    Étape 2 : Assembler les composants

    Placez le support d'objectif de microscope à une extrémité suivi des deux miroirs se faisant face et d'un cube vide à l'autre extrémité. Fixez-les avec des plaques de base.

    Étape 3 : Ajuster l'objectif

    Construisez un cube avec l'objectif de microscope à l'intérieur. Ajustez la hauteur de l'objectif si nécessaire en utilisant l'engrenage.

    Étape 4 : Placer l'oculaire

    Placez l'oculaire à côté de l'objectif de microscope et un cube vide à côté. Respectez la bonne orientation de l'oculaire.

    Étape 5 : Aligner la base du smartphone

    Placez la base du smartphone avec le trou aligné avec l'oculaire. Remarque : Vous pouvez ajuster l'orientation de la base du smartphone pour adapter la taille de votre smartphone.

    Étape 6 : Installer le porte-échantillon

    Placez le cube porte-échantillon au-dessus de l'objectif de microscope. Respectez la distance entre eux. Vous pouvez ajuster la distance approximative en faisant glisser le porte-échantillon à l'intérieur du cube et la distance plus fine en utilisant l'engrenage.

    Étape 7 : Ajouter la lentille convergente et la lampe

    Placez un cube de lentille convergente au-dessus du cube porte-échantillon et placez la lampe torche au-dessus. Placez le smartphone aligné avec l'oculaire.

    Étape 8 : Ajuster pour la clarté

    Essayez de déplacer le smartphone de manière à ce que tout le cercle de l'oculaire apparaisse illuminé. Ensuite, tournez l'engrenage pour focaliser et obtenir une image nette de l'échantillon.

    Mieux avec le smartphone ou l'œil ?

    L'appareil photo du smartphone a une lentille avec une distance focale très courte car elle doit s'adapter à l'épaisseur du smartphone. La lentille crée alors une image sur le capteur de la caméra dont les propriétés sont similaires à celles de l'œil humain.

    L'œil peut voir des objets à la fois de loin et de près. Cette propriété est appelée accommodation.

    L'appareil photo du smartphone peut également faire cela, mais cela s'appelle autofocus. Cela décrit la capacité à imager nettement des objets à différentes distances sur le capteur.

    L'image de l'oculaire provient de rayons parallèles, comme si elle venait de l'infini. Vous avez observé avec un œil détendu (regardant au loin) ou avec une caméra mise au point à l'infini.


    Résultats des calculs

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/FRENCH/uc2miniboxFR/index.html b/docs/Toolboxes/DiscoveryCore/FRENCH/uc2miniboxFR/index.html index dbed3bd82..20adef106 100644 --- a/docs/Toolboxes/DiscoveryCore/FRENCH/uc2miniboxFR/index.html +++ b/docs/Toolboxes/DiscoveryCore/FRENCH/uc2miniboxFR/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -24,7 +24,7 @@ docs/01_Toolboxes/01_DiscoveryCore/IMAGES/MINIBOX/2.png

    Si vous ne voyez pas une image nette, déplacez les inserts (par exemple, la lentille) jusqu'à ce que vous la voyiez clairement. La flèche verte sur l'image vous montre comment faire.

    Ici, vous pouvez trouver une petite vidéo qui explique le concept de base du cube

    Que signifient les symboles ?

    Durée : 2

    Expérience : Si vous voyez ce bloc, il y a quelque chose à expérimenter ! Vous pouvez placer un cube UC2 sur ce bloc.
    Explications : Si vous voyez cette icône, il y a quelque chose à apprendre !
    Calculs : Il y a quelque chose à calculer ici. Prenez un stylo et du papier et commencez les puzzles.
    Attention : Ne touchez pas les surfaces en verre avec vos doigts !
    Nettoyage des lentilles : Si vous avez déjà touché la lentille, vous pouvez la nettoyer avec un chiffon pour lunettes.

    Que peut faire une lentille ?

    Durée : 2

    Prenez un ou plusieurs cubes qui contiennent une lentille et regardez le symbole UC2 montré ici. Tenez le cube dans votre main et changez la distance entre la lentille et l'image.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/GERMAN/CoreLens/index.html b/docs/Toolboxes/DiscoveryCore/GERMAN/CoreLens/index.html index 2a46c4c51..e66f18c1e 100644 --- a/docs/Toolboxes/DiscoveryCore/GERMAN/CoreLens/index.html +++ b/docs/Toolboxes/DiscoveryCore/GERMAN/CoreLens/index.html @@ -10,15 +10,15 @@ - - + +

    Linse

    Linsen

    In der Strahlenoptik wird Licht als Bündel von Strahlen (Pfeilen) dargestellt, was die physikalischen Eigenschaften des Lichts vereinfacht. Ein Strahl hat eine Richtung und wird daher mit einem Pfeil gezeichnet. Eine Linse „bricht“ den Strahl und ändert dessen Richtung.

    Die Brennweite einer Linse entspricht der Entfernung von der Linse zur Brennebene, auf der der Brennpunkt liegt. Sie wird in Millimetern angegeben (f = mm).

    Sammellinsen (positiv) und Zerstreuungslinsen (negativ)

    Sammellinsen brechen die Lichtstrahlen, die parallel zur optischen Achse verlaufen, in einem Punkt, der Brennpunkt genannt wird.

    Die Zerstreuungslinsen brechen die Lichtstrahlen, die parallel zur optischen Achse verlaufen, so, als ob sie von einem Punkt, dem „virtuellen“ Brennpunkt, ausgingen.

    Linsen „brechen“ die Lichtstrahlen

    Sie können die Brennweite der Linse als aufgedruckte Zahl am Linsenhalter finden. Die MiniBOX enthält eine 100mm Sammellinse, zwei 40mm Sammellinsen und eine -50mm negative Linse. Die Zahlen geben die Brennweite an.

    Die Sammellinse wird auch positive oder konvexe Linse genannt. Der mittlere Teil der Linse ist immer dicker als der Rand.

    Die Sammellinse vergrößert das Bild. Die Vergrößerung unterscheidet sich zwischen der 40mm Linse und der 100mm Linse. Das Bild kann aufrecht oder invertiert sein.

    Die negative Linse (Streulinse) wird manchmal auch negative oder konkave Linse genannt. Der mittlere Teil der Linse ist immer dünner als der Rand.

    Mit der negativen Linse (hier: -50 mm Linse) wird das Bild immer verkleinert und immer aufrecht dargestellt.

    Wir gehen davon aus, dass unsere Linsen sogenannte „dünne Linsen“ sind. Das bedeutet, dass wir sie als eine Ebene betrachten können und uns nicht um ihre Dicke kümmern müssen. Das macht Erklärungen und Berechnungen viel einfacher.

    Haben die Antworten weitere Fragen aufgeworfen? Dann fahren Sie fort, um genau zu verstehen, wie Linsen funktionieren...

    Linsenbild

    Nehmen Sie jetzt die Linsenwürfel. Versuchen Sie mit der richtigen Linse, die Brennweiteninformation in den Würfeln zu entziffern. Bewegen Sie die Linse über die Schrift, bis sie dieselbe Größe wie der Text "UC2" hat.

    Können Sie den Text in derselben Größe und Ausrichtung wie das "UC2" sehen? Was passiert, wenn Sie den Abstand zwischen Linse und Bild ändern?

    Was passiert, wenn Sie eine Linse mit der falschen Brennweite verwenden?

    Bild eines Objekts durch eine positive Linse

    Nehmen wir die Sammellinse als Beispiel. Wir starten mit einem Objekt (grüner Pfeil) und sehen, was mit den Strahlen passiert, die von der Spitze ausgehen. Es gibt unendlich viele Strahlen in alle Richtungen, aber für die Zeichnung der Figur reichen die folgenden drei Strahlen aus:

    1. Der Zentralstrahl (orange) passiert ungestört das Zentrum der Linse.

    2. Der Fokusstrahl (gelb) startet ebenfalls von der Spitze des Pfeils, geht aber durch den objektseitigen Fokus bei Brennweite f. Nach der Linse geht er in gleicher Höhe, aber jetzt parallel zur optischen Achse weiter.

    3. Der Parallelstrahl (rot) verläuft zunächst parallel zur optischen Achse, wird dann aber an

      der Linse so gebrochen, dass er durch den bildseitigen Brennpunkt bei Brennweite f verläuft.

    Das Bild entsteht dort, wo alle Strahlen sich schneiden. Dieses Prinzip wird für alle Punkte oder die von ihnen ausgehenden Strahlen eines Objekts verwendet. Je nach verwendeter Linse und Position des Objekts ändern sich die Eigenschaften des Bildes, wie Größe, Orientierung und Position.

    Bild eines Objekts durch eine negative Linse

    Im Fall der negativen Linse verwenden wir dieselbe Methode, um den Strahlengang abzubilden. Anders als bei der Sammellinse ist das Bild immer verkleinert und virtuell. Die Vergrößerung hängt von der Position des Objekts vor der Linse ab. Anders als bei der Sammellinse wird das Bild auf der Objektseite erzeugt und daher als virtuelles Bild bezeichnet. Sie können es direkt mit Ihren Augen sehen, aber nicht auf einen Bildschirm projizieren.

    Die Art, wie eine Linse ein Bild erzeugt, ist vorhersehbar, wenn man die Brennweite dieser Linse kennt. Daher muss ein bestimmter Abstand eingehalten werden, damit Sie die Schrift mit der angegebenen Linse auf dem vorherigen Blatt sehen können.

    Die Vergrößerung und der Ort, an dem das Bild entsteht, hängen von der Brennweite der Linse und dem Abstand zwischen Linse und Objekt ab.

    Mit der Zerstreuungslinse (f = -50 mm) sehen Sie immer ein verkleinertes virtuelles Bild. Ein virtuelles Bild kann nur mit dem Auge betrachtet werden. Bisher haben wir nur virtuelle Bilder gesehen.

    Die Sammellinse als Lupe

    Nehmen Sie den UC2-Linsenwürfel mit einer Brennweite von f=40mm und verwenden Sie ihn als Lupe.

    Können Sie die kleinen Buchstaben durch die Sammellinse lesen? Was steht dort?

    Eine Linse in Aktion finden Sie hier:

    Das machen Sammellinsen

    Mit den Sammellinsen hängen Bild und Vergrößerung von der Position des Objekts ab.

    Wenn der Abstand zwischen Objekt und Linse mehr als das Doppelte der Brennweite der Linse beträgt, dann ist das Bild...

    • Umgekehrt
    • Seitlich getauscht
    • Verkleinert
    • Real

    Wenn der Abstand zwischen Objekt und Linse genau das Doppelte der Brennweite der Linse beträgt, dann ist das Bild...

    • Umgekehrt
    • Seitlich getauscht
    • Gleiche Größe
    • Real

    Wenn der Abstand zwischen Objekt und Linse mehr als die Brennweite und weniger als das Doppelte der Brennweite der Linse beträgt, dann ist das Bild...

    • Umgekehrt
    • Seitlich getauscht
    • Vergrößert
    • Real

    Objektabstand (g)

    Der Abstand zwischen dem Objekt und der Linsenebene wird als g bezeichnet.

    Bildweite (b)

    Der Abstand zwischen der Linsenebene und dem durch die Linse gebildeten Bild wird als b bezeichnet.

    Die Sammellinse kann ein reales Bild erzeugen. Das reale Bild kann dann auf einem Schirm gesehen werden.

    Deshalb vergrößert die Lupe

    Lupeneffekt!

    Wenn der Abstand zwischen dem Objekt und der Linse weniger als die Brennweite der Linse beträgt, dann ist das Bild...

    • Aufrecht
    • Richtig herum
    • Vergrößert
    • Virtuell

    Die Lupe ist das einfachste aller optischen Geräte, da sie nur aus einer einfachen Sammellinse mit geeigneter Brennweite besteht. Warum vergrößert der Würfel mit 50 mm den kleinen Text? Wenn das Objekt vor der Brennweite der Linse liegt – also weniger als 50 mm vor der Linse – erzeugt die Linse ein virtuelles Bild, das hinter dem eigentlichen Objekt liegt. Das Auge nimmt es vergrößert wahr. Schauen Sie sich das obenstehende Diagramm an.


    Berechnen Sie die Vergrößerung der Lupe mit der folgenden Formel:

    250 𝑚𝑚 ist der Abstand der klaren Sehweite – d. h. der Abstand zwischen dem Objekt und dem Auge, bei dem die meisten Menschen gut lesen können. Mehr dazu später bei der „Akkommodation“ des Auges.

    Wie funktioniert ein Kinoprojektor?

    Nehmen Sie den UC2-Linsenwürfel mit einer Brennweite von 𝑓 =40 𝑚𝑚 und platzieren Sie ihn hinter dem Probenhalterwürfel. Der Abstand zwischen dem Objekt und der Linse (also der Objektabstand g) sollte ca. 50 mm betragen. Wenn Sie das Objekt jetzt mit der Taschenlampe beleuchten, sehen Sie es in etwa 200 mm Entfernung scharf an der Wand. Ein Kinoprojektor hat anstelle des Objekts einen Filmstreifen und natürlich eine viel stärkere Lichtquelle.

    Verwenden Sie eine Taschenlampe (z. B. von Ihrem Handy) als Lichtquelle und halten Sie sie vor das Objekt

    Verwenden Sie das Bild oder den Text auf dem Mikroskopobjektträger als Objekt

    Wie ist das Bild ausgerichtet? Schieben Sie die Linse hin und her im Würfel und sehen Sie, wann das Bild scharf ist. Finden Sie das Bild für g = 50mm, 60mm, 65mm und messen Sie den Abstand zwischen der Linse und dem Bild.

    Wie funktioniert ein Kinoprojektor?

    Wo ist das Bild?

    Wenn ein Objekt durch eine Sammellinse abgebildet wird, hängen Position und Größe des Bildes von der Entfernung (g) des Objekts zur Linse und deren Brennweite (f) ab. Die Linsengleichung beschreibt die Beziehung zwischen Bildweite (b) und Objektabstand (g):

    Wie groß ist das Bild?

    Die Vergrößerung des Objekts auf der Leinwand kann einfach mit der folgenden Formel berechnet werden:

    Wie der Projektor funktioniert

    Überprüfen Sie, ob Ihre Beobachtung mit der Berechnung übereinstimmt

    Berechnen Sie die Vergrößerung des Projektors für die verschiedenen Werte von g und b.

    Unsere Linse hat eine Brennweite von f= 40 mm.

    Für g = 50mm → b = 200mm

    Für g = 60 mm → b = 120 mm

    Für g = 65 mm → b = 104 mm


    Der Projektor erzeugt immer ein vergrößertes, invertiertes (umgekehrtes) Bild. Die Position des Bildes und seine Vergrößerung hängen von der Position und Größe des Objekts ab.

    Tutorial: Bestimmung der Brennweite einer positiven Linse

    Benötigte Materialien:

    • Lichtquelle (z. B. Raumbeleuchtung)
    • Positive Linse
    • Schirm (z. B. Tisch, Stück Papier usw.)

    Anleitung:

    1. Positionieren Sie die positive Linse so, dass sie der Lichtquelle zugewandt ist. Richten Sie einen Schirm parallel zur Brennebene der Linse aus.
    2. Ändern Sie den Abstand zwischen Linse und Schirm.
    3. Beobachten und dokumentieren Sie sorgfältig die Position, an der die Lichtquelle ein klares Bild auf der Oberfläche des Schirms bildet.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/GERMAN/CoreTelescope/index.html b/docs/Toolboxes/DiscoveryCore/GERMAN/CoreTelescope/index.html index 940ca0387..dcadcbd82 100644 --- a/docs/Toolboxes/DiscoveryCore/GERMAN/CoreTelescope/index.html +++ b/docs/Toolboxes/DiscoveryCore/GERMAN/CoreTelescope/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -28,7 +28,7 @@ Suche ein Objekt in der Ferne und betrachte es durch Galileis Teleskop.

    Was ist ein Kepler-Teleskop?

    Setze die Linsen gemäß dem Diagramm in die richtigen Positionen. Dann schaue durch das Teleskop in die Ferne.

    Wie sieht das Bild aus? Wie ist die Bildausrichtung?

    Während du durch das Teleskop schaust, variiere die Abstände zwischen den Komponenten, um ein scharfes Bild zu sehen!

    Das ist ein Kepler-Teleskop

    Dieser Teleskoptyp wird oft in der Astronomie verwendet.

    So funktioniert das Kepler-Teleskop

    Welche Vergrößerung hat dieses Kepler-Teleskop?

    Formel zur Berechnung der Vergrößerung

    Dieses Teleskop kann eine höhere Vergrößerung als das Galilei-Teleskop erreichen. Es erzeugt jedoch ein umgekehrtes Bild. Das ist jedoch kein Problem bei der Beobachtung von Sternen.

    Das Bild ist immer
    • Vergrößert durch die Vergrößerung aus der obigen Formel
    • Umgekehrt
    • Seitlich vertauscht

    Das Sichtfeld ist größer als beim Galilei-Teleskop.


    Tutorial: Keplers Teleskop

    Benötigte Materialien:

    • Acht Bodenplatten
    • 100 mm positive Linse (im Würfel)
    • 50 mm positive Linse (im Würfel)
    • Zwei leere Würfel

    Diagramm (Seitenansicht):

    Anleitung zum Zusammenbau von Keplers Teleskop:

    Schritt 1: Würfel ausrichten

    Richte die Würfel so aus, dass die beiden Linsen an den Extremen liegen und die beiden leeren Würfel in der Mitte.

    Schritt 2: Würfel mit Bodenplatten fixieren

    Fixiere die Würfel mit den Bodenplatten, indem du sie oben und unten platzierst.

    Schritt 3: Abstand justieren

    Justiere den Abstand zwischen den Linsen, wie im Bild gezeigt.

    Schritt 4: Keplers Teleskop verwenden

    Suche ein Objekt in der Ferne und betrachte es durch Keplers Teleskop.

    Was ist ein Spektiv?

    Das Spektiv ist lang, daher ist das Schema nicht gleich groß. Setze die Linsen gemäß dem Diagramm in die richtigen Positionen und schaue durch das Teleskop in die Ferne.

    was zu folgendem führt

    Wie vergleicht sich das Bild hier mit dem Kepler-Teleskop?

    Während du durch das Teleskop schaust, justiere die Abstände zwischen den Komponenten, um ein scharfes Bild zu sehen!

    So funktioniert das Spektiv

    Die Vergrößerung ist wie beim Kepler-Teleskop. Die Umkehrlinse ändert nur die Orientierung (das Bild wird umgekehrt), nicht die Vergrößerung.

    Ein aufrechtes Bild ist für terrestrische Beobachtungen notwendig. Echte terrestrische Teleskope verwenden Prismensysteme, um das Bild zu drehen und kompakt zu halten.

    Das Bild ist
    • Mit der gleichen Vergrößerung wie das Kepler-Teleskop vergrößert
    • Aufrecht
    • Gespiegelt

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/GERMAN/coreMicroscope/index.html b/docs/Toolboxes/DiscoveryCore/GERMAN/coreMicroscope/index.html index a834a840a..3095683c5 100644 --- a/docs/Toolboxes/DiscoveryCore/GERMAN/coreMicroscope/index.html +++ b/docs/Toolboxes/DiscoveryCore/GERMAN/coreMicroscope/index.html @@ -10,8 +10,8 @@ - - + +
    ![](../IMAGES/MINIBOXTUTORIAL/image33.png)

    Mikroskop mit "Unendlichkeitsoptik" und Okular

    Kannst du das mikroskopische Bild durch die Okularlinse mit deinen Augen sehen? Welchen Effekt hat der Spiegel? Richte das Mikroskop ohne den Spiegel ein. Stelle sicher, dass du immer noch zwei leere Räume zwischen der Tubuslinse und dem Okular hast. Was beobachtest du dann?

    Eine kurze Einführung in Spiegel und deren Anwendungen findest du hier:

    Wozu dient das Okular?

    Neuere Mikroskope sind mit sogenannten "Unendlichkeitsoptiken" ausgestattet. In diesem Fall erzeugt die Linse kein reales Zwischenbild. Das Licht verlässt die Linse als unendliche parallele Strahlen. Am Ende des "unendlichen" Tubus befindet sich eine Tubuslinse. Diese erzeugt ein Zwischenbild, das dann durch das Okular erneut vergrößert wird.

    Das Bild hinter dem Okular ist umgekehrt, seitlich vertauscht, vergrößert und virtuell. Das virtuelle Bild kann mit dem Auge gesehen werden.

    Diese Konfiguration ist sehr nützlich in modernen Mikroskopen, da sie das Einfügen zusätzlicher Komponenten wie Filter zwischen Objektiv und Tubuslinse erlaubt, ohne den optischen Weg zu beeinflussen.

    Ein Filter kann verwendet werden, um die Helligkeit und Farbe des Bildes zu ändern.


    Das Okular ist dafür gut

    Wie groß ist die Vergrößerung nach dem Okular?

    Gesamtvergrößerung

    Ein Okular ist eigentlich nur eine Linse, die das Zwischenbild vergrößert. Es bildet das virtuelle Bild so ab, dass du es mit deinen Augen sehen kannst.

    Mit dem Spiegel kannst du nicht nur dich selbst sehen, sondern auch das einfallende Licht in jede Richtung reflektieren. So kannst du den optischen Weg falten und die Arbeit komfortabler gestalten. Der Spiegel beeinflusst zwar nicht die Vergrößerung, dreht aber das Bild in eine Richtung.

    Tutorial: Lichtmikroskop mit Unendlichkeitsoptik und Okular

    Benötigte Materialien:

    • Keplers Teleskop
    • Taschenlampe
    • Acht Bodenplatten
    • Probenhalter (im Würfel) mit Probe
    • Spiegel (im Würfel)
    • Leerwürfel
    • Okular (im Würfel)

    Diagramm (Seitenansicht):

    Anleitung zum Zusammenbau des Lichtmikroskops mit Unendlichkeitsoptik und Okular:

    Schritt 1: Probenhalter-Würfel hinzufügen

    Füge den Probenhalter-Würfel im Keplers Teleskop neben der 50-mm-Sammellinse hinzu.

    Schritt 2: Neben der 100-mm-Linse zusammenbauen

    Montiere neben der 100-mm-Sammellinse einen Leerwürfel und daneben den Spiegelwürfel.

    Schritt 3: Das Okular platzieren

    Platziere das Okular oben auf dem Spiegelwürfel mit der richtigen Orientierung. Beleuchte die Probe aus einiger Entfernung.

    Schritt 5: Für ein scharfes Bild justieren

    Schaue durch das Okular. Justiere den Linsenabstand, bis du ein fokussiertes scharfes Bild siehst. Hinweis: Wenn du das Präparat nicht siehst, versuche vorsichtig die Position der Probe zu justieren, bis du das Präparat siehst.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/GERMAN/uc2miniboxDE/index.html b/docs/Toolboxes/DiscoveryCore/GERMAN/uc2miniboxDE/index.html index 4abb85758..c75503d29 100644 --- a/docs/Toolboxes/DiscoveryCore/GERMAN/uc2miniboxDE/index.html +++ b/docs/Toolboxes/DiscoveryCore/GERMAN/uc2miniboxDE/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -27,7 +27,7 @@ ||Rechnungen: Hier gibt es etwas zu rechnen. Nehmen Sie einen Stift und Papier und beginnen Sie mit den Rätseln. | ||Vorsicht: Berühren Sie die Glasoberflächen nicht mit Ihren Fingern! | ||Reinigung der Linsen: Wenn Sie die Linse bereits berührt haben, können Sie sie mit einem Brillentuch reinigen. |

    Was kann eine Linse bewirken?

    Nehmen Sie einen oder mehrere der Würfel, die eine Linse enthalten, und betrachten Sie das hier gezeigte UC2-Symbol. Halten Sie den Würfel in Ihrer Hand und ändern Sie den Abstand zwischen der Linse und dem Bild.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/Opticsintro/index.html b/docs/Toolboxes/DiscoveryCore/Opticsintro/index.html index 09f4b8df7..a13248167 100644 --- a/docs/Toolboxes/DiscoveryCore/Opticsintro/index.html +++ b/docs/Toolboxes/DiscoveryCore/Opticsintro/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -36,7 +36,7 @@

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/SPANISH/core_intro/index.html b/docs/Toolboxes/DiscoveryCore/SPANISH/core_intro/index.html index 9d6ca5380..ee945fd76 100644 --- a/docs/Toolboxes/DiscoveryCore/SPANISH/core_intro/index.html +++ b/docs/Toolboxes/DiscoveryCore/SPANISH/core_intro/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -21,7 +21,7 @@ Objetivo del microscopio: un sistema de lentes especial que agranda un objeto

    Un recorrido rápido a través de una caja ejemplar y cómo se ven los cubos (estado de octubre de 2022) se puede encontrar aquí:

    ¿Qué es UC2?

    El elemento central del proyecto UC2 es un cubo simple. El cubo consta de dos mitades y alberga un inserto deslizable. El inserto puede contener varios componentes ópticos (por ejemplo, lentes, espejos), lo que significa que se pueden implementar diferentes funciones con cada cubo.

    Tipo de cubo 1: moldeado por inyección con conexión de enchufe

    Base

    Base

    El cubo se puede montar en una placa base. Los módulos de la placa base se pueden juntar como un rompecabezas.

    UC2 para imprimir tú mismo

    El cubo UC2 también se puede imprimir en 3D. Se ve igual que el modelo moldeado por inyección, pero aquí consiste en una tapa de cubo y el cuerpo del cubo, que se mantienen unidos con tornillos. Los tornillos son excelentes para colocarlos en la placa magnética. Al combinar diferentes módulos de cubos, se pueden ensamblar fácilmente diferentes estructuras ópticas. Una nueva función se puede agregar con cada dado. Tu creatividad no tiene límites.

    Tipo de cubo 2: impreso en 3D con conexión magnética

    Dados

    Placa base con imanes

    En la placa base impresa en 3D hay pequeños imanes esféricos en los que se colocan los cubos.

    ¿Quieres más dados? Entonces puedes construirlos tú mismo. Puedes encontrar todo aquí

    Así es como encajan los dados

    Duración: 1

    Asegúrate de que los cubos estén colocados correctamente en la placa y no estén inclinados. Al final es importante que los insertos estén en el lugar correcto.

    Si no ves una imagen nítida, mueve los insertos (por ejemplo, lente) hasta que la veas claramente. La flecha verde en la imagen te muestra cómo hacerlo.

    Aquí puedes encontrar un pequeño video que explica el concepto central del cubo



    ¿Qué significan los símbolos?

    Duración: 2

    Experimenta Si ves este bloque, ¡hay algo con lo que experimentar! Puedes colocar un cubo UC2 en este bloque.
    Explicaciones: Si ves este icono, ¡hay algo que aprender!
    Cálculos: Aquí hay algo que calcular. Toma un lápiz y papel y comienza a resolver rompecabezas.
    Precaución: ¡No toques las superficies de vidrio con tus dedos!
    Limpieza de las lentes: Si ya has tocado la lente, puedes limpiarla con un paño para gafas.

    ¿Qué puede hacer una lente?

    Duración: 2

    Toma uno o más de los cubos que tienen una lente y observa el símbolo UC2 mostrado aquí. Sostén el cubo en tu mano y cambia la distancia entre la lente y la imagen.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryCore/Smartphone Microscope/index.html b/docs/Toolboxes/DiscoveryCore/Smartphone Microscope/index.html index 0bf4f5385..8c03d4089 100644 --- a/docs/Toolboxes/DiscoveryCore/Smartphone Microscope/index.html +++ b/docs/Toolboxes/DiscoveryCore/Smartphone Microscope/index.html @@ -10,13 +10,13 @@ - - + +

    openUC2 Smartphone Microscope with a finite corrected objective lens

    This video shows you how to build the UC2 smartphone microscope as also indicated in the PDF manual. It shows some tricks how to make it more stable and how to operate the Z-stage

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/index.html b/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/index.html index 9f7013bdc..afc4ea09c 100644 --- a/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/index.html +++ b/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/index.html @@ -10,13 +10,13 @@ - - + +

    The Course BOX Alignment Procedure (Finite Optics)

    This is the alignment procedure of the experiments with finite-corrected optics. If you are looking for the infinity-corrected setups click here.

    First experiment: Finite-corrected Microscope with Köhler Illumination

    This experiment demonstrates the essential parts of a microscope and explains the concept of conjugate planes. The key components are: Light source, Collector lens, Field Diaphragm, Aperture Diaphragm, Condenser lens, Sample, Objective lens and Eyepiece lens.

    The Aperture set of conjugate planes: Lamp filament, Aperture diaphragm, Back Focal Plane of the objective, Exit pupil of the eye.

    The Field set of conjugate planes: Field diaphragm, Sample plane, Primary Image Plane, Retina.

    Second experiment: Abbe Diffraction Experiment

    The famous Abbe Diffraction Experiment shows how diffraction of light by a specimen (and interference with the illuminating light) creates an image and how collection of diffracted light defines the resolution of the microscope. With this setup it is possible to view both sets of conjugate planes at the same time, with one's eye or a camera.

    The Aperture set of conjugate planes: Lamp filament, Aperture diaphragm, Back Focal Plane of the objective, Mirror surface in the side arm, Retina.

    The Field set of conjugate planes: Field diaphragm, Sample plane, Primary Image Plane, Retina.

    We propose to use a diffraction grating as a sample and spatial filter in the BFP.

    This tutorial will lead you step-by-step through the alignment of the Finite-corrected Microscope, Köhler Illumination and Abbe Diffraction Experiment.

    1. Start with 1×13 baseplate and all the cubes:
    • Flashlight Cube (1)
    • Collector Lens Cube (2)
    • 2× Circular Aperture Cube (3)
    • Condenser Lens Cube (4)
    • Sample Cube (5)
    • Objective Lens Cube (6)
    • Objective in Z-Stage Cube (7)
    • Eyepiece Lens Cube (8)
    • 4× Screen Cube - 1× with white paper, 3× with lens tissue (9)

    2. Start by placing the sample - we will build the microscope around it.

    3. Place the Primary Image Plane (PIP) by definition: the distance is 200 mm from sample to PIP when using the 4× objective (finite-corrected for 160 mm, 40 mm working distance). Use the Sample cube with white paper as a screen.

    1. Place the objective lens. It is a single plano-convex lens with f' = 35 mm.

    2. Use direct illumination from the flashlight with its lens. Adjust the position of the objective lens - focus the image on PIP by moving the lens back or forth.

    • Focussing Trick: Firstly move the whole objective lens cube in one direction (away from the sample). If the image sharpness in PIP improves, slide the insert in that direction. If the image sharpness in PIP gets worse, slide the insert in the opposite direction, towards the sample. Continue until you get a focussed image of your sample on the PIP.

    1. Place the eyepiece lens behind the PIP. It is a single plano-convex lens with f' = 40 mm. Exchange the PIP screen with a semitransparent screen (lens tissue). While looking through the eyepiece, focus it on the PIP. Use the Focussing Trick again. The position within the cube of the sample holder for the paper screen and for the semitransparent screen has to be identical.

    2. Take away the screen from PIP. To dim the flashlight, put a piece of lens tissue in front of it. Look through the eyepiece - you should see a sharp image of your sample.

    3. Place the Field Diaphragm (FD). The position was chosen in order to work well with the available lenses.

    1. Place the condenser lens. It is a single plano-convex lens with f' = 40 mm.

    2. Place the PIP screen back to its position.

    3. Place the flashlight on one end of the baseplate. Close the FD.

    4. Adjust the position of the condenser lens - focus the image of the FD on PIP by moving the lens back or forth (Focussing Trick). Once you see a sharp image of the closed FD on the screen in PIP, open and close the aperture and observe its effect.

    5. Remove the screen, look through the eyepiece and check whether you see a sharp image of the closed FD.

    6. Place the Aperture Diaphragm (AD) into the Front Focal Plane (FFP) of the condenser lens (40 mm).

    • Focal Plane Trick - In case you don't know where exactly the FFP is, use this:

    • Use the Laser Cube with Beam Expander Cube to produce a collimated beam. Place the condenser lens in the collimated beam and find focus.

    • Place the AD into the same plane - slide it in within the cube. Careful - push the Aperture from one side to keep it together.

    1. Remove the lens of the flashlight. The position of the flashlight remains.

    1. Place the collector lens. It is a single plano-convex lens with f' = 50 mm.

    2. Remove the FD. Close the AD. Center the flashlight with respect to the AD. Focus the image of the LED on the AD by adjusting the position of the collector lens (Focussing Trick).

    3. Place the FD back in the illumination path. Now the Köhler illumination is properly aligned.

    4. Place a semitransparent screen into the Back Focal Plane (BFP) of the objective lens. Close the AD and check that you see a focussed image of the AD in the BFP.

    5. Remove all screens. Use a lens tissue to dim the light and look through the eyepiece. Observe the effect of opening and closing the apertures.

    • Left: both apertures open. Middle: FD closed. Right: AD closed.

    1. Exchange objective lens with 4× objective in Z-Stage. Place a screen in PIP and observe the effect of opening and closing the apertures in the PIP.

    • Top: both apertures open. Middle: AD closed. Bottom: FD closed.

    1. Remove the screen. This is an aligned finite-corrected microscope with Köhler illumination.

    1. By adding 4 more cubes and another baseplate, we will now create the Abbe Diffraction Experiment. The extra cubes are:
    • Beamsplitter Cube (1)
    • Mirror 45° Cube (2)
    • Eyepiece Lens Cube (different f' than the previously used one!) (3)
    • Relay Lens Cube (4)

    24. Remove the eyepiece (40 mm) and add the 4×2 baseplate. Exchange the 4× objective with the objective lens (35 mm).

    25. Place the Beamsplitter Cube.

    1. Place the screen into the PIP, in front of the Beamsplitter. Place a new eyepiece lens behind the Beamsplitter. It is a single plano-convex lens with f' = 100 mm. While looking through the eyepiece, focus it on the PIP (Focussing Trick).

    2. In the other arm we will observe the BFP. There are two options, depending on how you place the Mirror in the next step.
    • Option 1: Place the mirror as shown in the picture.
    • Place the eyepiece lens (40 mm).

    • Option 2: Place the mirror as shown in the picture.
    • Place the baseplate connector on the mirror cube.
    • Place the eyepiece cube (40 mm) on the connector on the mirror cube.

    1. Place the relay lens after the Beamsplitter. It is a single plano-convex lens with f' = 75 mm.

    2. Place a semitransparent screen in the BFP of the objective lens. Close AD and look through the eyepiece of the side arm. You should see a sharp image of the AD on the BFP.

    3. This is the Abbe Diffraction Experiment. Through the eyepieces you can see both sets of conjugate planes at the same time.

    Participate

    If you have a cool idea, please don't hesitate to write us a line, we are happy to incorporate it in our design to make it even better.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_InfOptics/index.html b/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_InfOptics/index.html index b30f8a212..740f3ddc9 100644 --- a/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_InfOptics/index.html +++ b/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_InfOptics/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -52,7 +52,7 @@ We use a laser pointer as a light source and expand it using two lenses. The imaging path is the same as in the microscope in the first experiment and therefore we can observe the image of our sample in the main arm and the image of the BFP in the side arm.

    1. We use a very fine fish net as a sample here. You could try a net like this one. Another idea is to try one of these plastic tea bags. Or a diffraction grating.

    2. We provide a circular aperture and a rectangular aperture to be used in the BFP. We also suggest to use an opaque dot (a small dot made with some marker or paint on a microscope slide) and a thin line object (like this tiny hex key here). The apertures block the light from the outside while the dot and line can block the center of the light path.

    3. As mentioned earlier, we keep the imaging path in both main arm and side arm. Remove the illumination part of the microscope and also the Eyepiece.

    4. Besides the apertures that we already prepared, we will need :

    • Laser Cube with laser pointer (1)
    • 1× Lens Cube with 50 mm lens (2)

    For now, place the camera in PIP.

    1. The laser is equipped with a cap that holds a lens from the RasPi camera. Make sure to put it on, otherwise you won't be able to create an expanded parallel beam.

    2. Place the Laser cube on the baseplate as shown in the picture.

    Careful! Do not hit anybody's eyes with the laser beam. Keep the laser off if you're not using it at the moment. Always point the laser away from people. Block the light if it's leaving the table you're working on.

    1. Place the lens for beam expansion behind the Laser cube as shown in the picture. It is a single plano-convex lens with f' = 50 mm. Align the lens to illuminate your Sample with a collimated beam - the diameter of the beam should be the same just after the lens cube and also far away from it. When your beam is well-collimated, the distance between the laser+lens duo and the Sample doesn't matter.

    2. You can switch the camera between the PIP and the BFP. You could also use two cameras, one in PIP and one in BFP, if you have them.

    3. Between the objective and the Beamsplitter is the Back Focal Plane of the Objective. You can see it if you put a piece of paper there - you will see the Fourier transform of the sample. You should see the same on camera in the side arm.

    4. In the PIP, you can see an image of the sample. Here we see our fish net. Align the camera to obtain a sharp image.
      ⭐ Because of the Talbot effect you can find more than one sharp image of the sample. Therefore, partially close the Field diaphragm (FD) and find the position of the camera where you not only see a sharp image of the grating (fish net) but also of the FD.

    1. In the BFP image in the side arm, you can see the Fourier transform of the grating just as it looks in the BFP itself. Align the second Relay lens to obtain a focussed image on the camera.
      ⭐ The grating is regular in both X and Y and therefore it's a very convenient sample for this experiment, because its Fourier transform is easily predictable. With a different sample the BFP will of course also look differently.

    ⭐ Back Focal Plane

    The intensity peaks in the BFP are the diffraction orders of our sample. By placing an aperture or another object here we’ll be able to modify the information transmitted through the microscope that contributes to the image. Depending on the aperture we can observe different effects.

    • Circular aperture: The circular aperture blocks the light symmetrically from outside towards the center. Close the aperture and align the laser such that the 0th order is in the center of the aperture. You can align the laser using the four screws in its holder.

    • Rectangular aperture: The rectangular aperture closes independently from both sides in X and Y direction (horizontally and vertically). Use a hex key or a similar tool to close/open the aperture doors.

    • Dot and line: Use a sample holder cube or your (presumably steady) hand to hold these two. You can block the 0th or 0th+1st orders with the dot, depending on how big it is. You can block the X-0th or Y-0th order with the line-object.

    1. This is the setup for the second experiment: Abbe Diffraction Experiment.

    ⭐ Abbe Diffraction experiment - What do we see?

    1. With no aperture in the BFP, we see the image of the Sample in PIP and the Fourier transform of the sample in the BFP, as we just aligned it and prepared it.

    2. Firstly we use the Circular aperture. As we slowly close it and change the diameter of the transmitting area, we cut out the higher diffraction orders that carry the high frequency information, hence the fine details. In the image plane we see how these details blur and the sharp edges soften. The more orders we cut out, the blurrier the image gets.

    3. Using the Rectangular aperture, we can block the diffraction orders more selectively. When we close the aperture in the X direction to only let through the Y-0th orders, the square pattern of the image disappears, and we have only lines. This is because there is no X order that would transmit the information about the shape in the perpendicular direction.

    4. When we do the same trick in the other direction, we then see lines of the other orientation but again no square pattern.

    5. Closing the aperture in both X and Y direction, we eventually block all the higher orders that form the image of the sample. As we can see here, when only the 0th order is transmitted all image information is lost. What we see is only some background noise.

    6. On the other hand, when we block only the 0th order but keep all the others (we do this using the dot on a slide), we are still able to see the pattern is preserved, because all the orders still have a corresponding partner to interfere with on the other side from the 0th order. But now we are in a so-called dark field imaging mode. We'll explain it in the next steps.

    7. We can even block the 0th and 1st order by simply using a bigger dot in the BFP. We are still able to recognize the square pattern but the high frequency information, the noise, is taking over the image.

    8. When using the line object instead of the dot, we can block the 0th order completely in the Y direction and see what it does to the image. We still see the square pattern but suddenly, in the X direction, it seems that we have twice as many squares. This is the dark field imaging effect but in X only. We’re seeing just the edges and because there are two edges per square in one direction, it appears that we see them twice.

    9. The same works also in the perpendicular direction - blocking the 0th order in X results in the dark field imaging mode in Y.

    10. Using the rectangular aperture again and we can find out what is the minimal amount of orders that we need to form a reliable image. We said that they always interfere with the 0th order, so we don't need both sides. Therefore, we close the aperture and let through only one quarter of the orders. We can block the higher orders as well, as they only carry the high frequency information, and we are still able to see the basic pattern of our sample.

    ⭐ Watch the video of this experiment!

    UC2 YouSeeToo - Abbe Experiment Demonstration

    Notes to the video:

    • In this demonstration of the experiment, two Alvium cameras from Allied vision are used, so we can show the PIP and BFP on the screen simultaneously
      • Find the cubes for the Alvium cameras here and choose the adjustable insert for easy alignment.
    • The optical path is different from the one described in this tutorial. This is because of the use of the above mentioned cameras
      • The objective and eyepiece are both lenses with f' = 100 mm. The magnification of the microscope is therefore equal to 1. The "magnified" image is just a zoom into the camera view.
      • Thanks to the use of a 10 mm lens as an objective, the diffraction orders in BFP are more separated and easily accessible.
      • In the side arm, the first lens has f' = 100 mm and the second lens f' = 50 mm. The image of the BFP is therefore demagnified twice, to fit better in the field of view of the camera.

    Bonus question: This magical image was taken by the RasPi camera in the BFP with the fish net as a sample. If you tell me what created this effect, I'll send you a chocolate ;-)

    Participate

    If you have a cool idea, please don't hesitate to write us a line, we are happy to incorporate it in our design to make it even better.

    References:

    1; 2; 3; Cat image source;
    4 Advanced Optical Imaging Workshop; Plymouth; Noah Russell, 2009©

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryDiffraction/MicroscopyCore/index.html b/docs/Toolboxes/DiscoveryDiffraction/MicroscopyCore/index.html index c9c269a86..c54227eaa 100644 --- a/docs/Toolboxes/DiscoveryDiffraction/MicroscopyCore/index.html +++ b/docs/Toolboxes/DiscoveryDiffraction/MicroscopyCore/index.html @@ -10,13 +10,13 @@ - - + +

    MicroscopyCore

    The first version of this BOX was designed for the "Principles of Light Microscopy" Course of the Light Microscopy Facility of MPI-CBG in Dresden. The alignment procedure was developed and optimised by Sebastian Bundschuh. It follows the lectures of Peter Evennett that can be found here.

    CourseBOX teaches the core principles of microscopy and basics of optical alignment. It is intended for microscopy courses for students that are rather on the side of users than designers. This BOX provides a hands-on experience with insight into the black box that a microscope often seems to be. It comes with alignment tutorials and relies on basic components. By reusing the components and starting from the common ground, it shows that all the microscopy methods are based on only a few principles.

    It is not yet a comprehensive and optimised toolbox but rather a collection of modules and experiments that are frequently taught in microscopy courses. There is still space for improvement and we're hoping that more universities and institutions adopt the CourseBOX in their courses, which will lead to its improvement and (if successful) production.

    Build the BOX

    A list of 3D-printed parts and necessary components is found in BUILD_ME, together with assembly guidelines and some printing tips and tricks.

    Setups

    What can you build with the CourseBOX?

    Compound microscope with proper Köhler illumination (finite optics)

    A finite corrected microscope with proper Köhler illumination. All conjugate planes are accessible.

    LINK for the detailed alignment procedure with image tutorial.

    Abbe Diffraction Experiment (finite optics)

    Classical experiment for explaining Fourier transform done by a lens. The illumination stays the same as in the previous experiment, but a beamsplitter and a relay lens are added, for simultaneous observation of the Primary Image Plane and the Back Focal Plane.

    LINK for the detailed alignment procedure with image tutorial.

    Compound microscope with proper Köhler illumination (infinity optics)

    An infinity corrected microscope with proper Köhler illumination. All conjugate planes are accessible.

    LINK for the detailed alignment procedure with image tutorial.

    Abbe Diffraction Experiment (infinity optics)

    Classical experiment for explaining Fourier transform done by a lens. The setup of the previous experiment is reused and a laser pointer is added as a light source.

    LINK for the detailed alignment procedure with image tutorial.

    Coming soon:

    Laser Scanning Confocal Microscope

    Laser Scanning system is built on the detection side of the same setup. Scanning mirror can be rotated around one axis, which results in the translation of the point on the sample.

    Light Sheet Microscope

    The principle of Selective Plane illumination Microscopy is demonstrated with white light. The illumination path stays the same, only the collector lens is exchanged for a cylindrical one. The detection path is rotated by 90°.

    Participate

    If you have a cool idea, please don't hesitate to write us a line, we are happy to incorporate it in our design to make it even better.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryDiffraction/index.html b/docs/Toolboxes/DiscoveryDiffraction/index.html index 23cf6ca84..db087785f 100644 --- a/docs/Toolboxes/DiscoveryDiffraction/index.html +++ b/docs/Toolboxes/DiscoveryDiffraction/index.html @@ -10,15 +10,15 @@ - - + +

    Building The CourseBOX

    This is a guide for building the CourseBOX. If you were looking for another BOX version, click here.

    This guide currently describes how to build both the finite-corrected version and the infinity version of the CourseBOX.

    Content of each section

    1. Shopping
    2. 3D printing
    3. Tools
    4. Assembly

    Infinity Optics

    • Total price: 300 € without a Raspberry Pi (190 €) (assuming only material costs and printing everything yourself)
    • Printing time including preparation: 4 days
    • Assembly time: 1 day

    Shopping

    What to buy

    • Check out the RESOURCES for more information!
    Link - name of partAmountCommentPrice
    3D printing material~580 gChoose material that works with your 3D printer. If unsure, have a look at the guide in 3D printing section15 €
    Lens 50 mm5 piecesá 21 €
    Lens 100 mm3 piecesArtikel 2004á 6 €
    Lens for Eye Cube1 piece551.OALá 5 €
    Beamsplitter1 pieceArtikel 2137á 27 €
    Flashlight1 pieceLight source for microscope.á 7 €
    Laser Pointer1 pieceLight source for the Abbe experiment. With this very one you don't even need the flashlight. More information below.á 5 €
    Magnets64 piecesWhen using 3DP Cubes and baseplates. Ball magnets, diameter 5 mm.total 20 €
    Screws7 piecesM2×16 - 6 pieces; M2 nut - 6 pieces; M3×18 - 4 piecestotal ~2 €
    Screws112 piecesFor 3DP Cubes extra: (Art.-Nr. 00843 12) M3×12, galvanized steel - 64 pieces; (Art.-Nr. 00843 8) M3×8, galvanized steel - 64 pieces (or 128 pieces of M3×12) - BUT for this setup it isn't necessary to have screws on both sides ; (Art.-Nr. 025505 8) M5×8, galvanized steel - 96 pieces (half if one-sided)total ~4 €
    Raspberry Pi with accessoriesHave a look in our Bill-of-Materials for a complete list and links.190 €
    Chocolate1 barUse it as a reward when you're done.

    2 in 1 light source

    We propose the use of this Laser Pointer, because it also has a white LED. But we need to smartly adapt this for our experiments.

    • For beam expansion of the laser light, print a laser cap from the STL folder and put in it the RasPi lens that you removed from the camera
    • The LED here has a lens which cannot be removed. To be able to find a focussed image of your light source, take a thin permanent marker and make a cross on the surface of this lens. Now you'll be able to see a focussed image of the cross in the Field set of Aperture planes

    3D Printing:

    Parts

    To acquire the STL-files use the UC2-Configurator. The files themselves are in the RAW folder. The BOXes can be built using injection-moulded (IM) or 3D-printed (3DP) cubes.

    Note on the lens holders: If you use some other lens, you can generate a holder for it using our openSCAD design. Go to the Thingiverse page of this lens holder and use their in-built customizer to change the parameters of the insert.

    Completely new to 3D printing? Have a look into this beginner's guide!

    Our quick printing tutorial can be found here: UC2 YouSeeToo - How to print the base-cube?

    We have a good experience with this printer and settings:

    • Prusa i3/MK3S
      • Prusament PLA 1,75 mm, for one Box: 0,58 kg = 195 m = 90 hours = 15 €
      • Profile Optimal 0,15 mm, infill 20%, no support, 215/60°C

    Which tools to use

    ToolImageComment
    Electric screw driver with 2,5 mm hex bitFor putting the cubes together using M3×12 and M3×8 screws.
    2,5 mm hex keyFor fine adjustment of all the M3 screws if needed.
    Needle-nose PliersMight come handy

    Assembly

    Part - linkResultComment
    Baseplates16× baseplate puzzle
    Lens Cubes8× Lens Cube: 5× Lens Cube with 50 mm lens; 3× Lens Cube with 100 mm lens.
    Sample Cubes2× Sample Holder Cube
    Flashlight Cube1× Flashlight Cube
    Circular Aperture Cube2× Circular Aperture Cube
    Rectangular Aperture Cube1× Rectangular Aperture Cube
    Beamsplitter Cube1× Beamsplitter Cube
    RasPi Camera Cube1× Camera Cube with Raspberry Pi camera with the lens removed
    Laser Cube1× Laser Holder Cube and Laser Clamp with a cap for RasPi lens
    Eye Cube1× Eyeball Cube

    Software

    Prepare the Raspberry Pi following our tutorial in UC2-Software-GIT!

    Done! Great job!


    Finite-corrected Optics

    • Printing time including preparation: 5 days
    • Assembly time: 1 day

    Shopping

    What to buy

    • Check out the RESOURCES for more information!
    Link - name of partAmountCommentPrice per amount used
    3D printing material~620 gChoose material that works with your 3D printer. If unsure, have a look at the guide in 3D printing section20 €
    Microscope objective 4×1 piece10 €
    Lens 35 mm1 pieceWe did the alignment with lenses of these focal lengths, but other combinations are also possible. The alignment principle stays the same, but the positions of the elements will be different.22 €
    Lens 40 mm2 pieces44 €
    Lens 50 mm1 piece21 €
    Lens 75 mm1 piece20 €
    Lens 100 mm1 piece20 €
    Flashlight1 pieceLight source for the projector and microscope.7 €
    Magnets128 piecesBall magnets, diameter 5 mm.30 €
    Screws~120 piecesM3×12, galvanized steel - ~90 pieces; M3×8, galvanized steel - ~90 pieces; M3×18, galvanized steel - 2 pieces; M3×30, not magnetic - 1 piece; M3 nut~15 €
    Chocolate1 barUse it as a reward when you're done.

    3D Printing:

    Completely new to 3D printing? Have a look into this beginner's guide!

    Our quick printing tutorial can be found here: UC2 YouSeeToo - How to print the base-cube?

    We have a good experience with this printer and settings:

    • Prusa i3/MK3S
      • PLA 1,75 mm, for one Box: 0,6 kg = 235 m = 85 hours = 20 €
      • Profile Optimal 0,15 mm, infill 20%, no support, 215/60°C

    Note: The design of the mechanical Z-stage has recently been changed. The files here are not yet up-to-date. Please check the Mechanical Z-stage for the latest version. Same applies to the Lens Holder available here

    Housing

    Name of part - Link to STL fileAmount
    (01) Basic Cube 2×11 piece
    (02) Basic Lid 2×11 piece
    (03) Basic Cube 1×120 pieces
    (04) Basic Lid 1×120 pieces
    (05) Baseplate 4×14 pieces
    (06) Baseplate 4×21 piece
    (07) Baseplate 1×11 piece
    (08) Baseplate Connector 1×11 piece

    Inserts

    Name of part - Link to STL fileAmountComment
    (09) Z-Stage Focusing Insert1 pieceRotate the part in your slicer before printing. Always print it laying on the flat side.
    (10) Z-Stage Objective Mount1 pieceFor mounting the objective lens (RMS thread).
    (11a) Z-Stage Bottom Plate1 pieceThe plate holds the gear and screw in position, allowing them only to rotate but not to wobble.
    (11b) Z-Stage Top Plate1 pieceThe plate holds the gear and screw in position, allowing them only to rotate but not to wobble.
    (12) Z-Stage Gear1 pieceKindly borrowed from openflexure.
    (13) Lens Holder6 piecesDiameter fits for the listed lenses (25 mm).
    (14) Lens Holder Clamp6 piecesDiameter fits for the listed lenses (25 mm).
    (15) Cylindrical Lens Holder1 pieceDiameter fits for the listed lenses (25 mm).
    (16) Generic Sample Holder5 piecesIn the SimpleBOX, it is used to hold the object in the projector setup.
    (17) Generic Sample Holder Clamp5 piecesTo fix the sample.
    (18) Mirror Holder 45° 30×30mm²1 pieceSize fits for the listed mirrors.
    (19) Flashlight Holder2 pieces
    (20) Circular Aperture Guide2 pieces
    (21) Circular Aperture Wheel2 pieces
    (22) Circular Aperture Lid2 pieces
    (23) Circular Aperture Leaf14 pieces
    (24) Laser Holder2 pieces
    (25) Laser Clamp1 piece
    (26) Beam Expander Insert1 piece
    (27) Beam Expander Lens Adapter1 piece
    (28) Beamsplitter Insert1 piece

    Which tools to use

    ToolImageComment
    Electric screw driver with 2,5 mm hex bitFor putting the cubes together using M3×12 and M3×8 screws.
    2,5 mm hex keyFor fine adjustment of all the M3 screws if needed.
    1,5 mm hex key↑↑For mounting worm screws.
    Needle-nose PliersMight come handy

    Assembly

    Part - linkResultComment
    Baseplates1× "big" baseplate (4×2), 4× "small" baseplate (4×1), 1× "unit" baseplate (1×1), 1× "unit" baseplate connector (1×1)
    Z-Stage Cube1× mechanical Z-Stage, Sample Clamp not necessary
    Lens Cubes6× Lens Cube; Write the focal lengths of the lenses on the holders, so you can always easily find the right one when building the setups.
    Cylindrical Lens Cube1× Cylindrical Lens Cube
    Sample Cubes5× Sample Holder Cube
    Mirror Cube1× Mirror Cube
    Flashlight Cube1× Flashlight Cube
    Circular Aperture Cube2× Circular Aperture Cube
    Laser Cube1× Laser Holder Cube and Laser Clamp
    Beam Expander Cube1× Beam Expander Cube
    Beamsplitter Cube1× Beamsplitter Cube

    Done! Great job!

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryElectronics/Automation_intro/index.html b/docs/Toolboxes/DiscoveryElectronics/Automation_intro/index.html index adcef6c03..994034798 100644 --- a/docs/Toolboxes/DiscoveryElectronics/Automation_intro/index.html +++ b/docs/Toolboxes/DiscoveryElectronics/Automation_intro/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -34,7 +34,7 @@

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryElectronics/Camera Setup/index.html b/docs/Toolboxes/DiscoveryElectronics/Camera Setup/index.html index 8b56f1a7b..2083ad1bf 100644 --- a/docs/Toolboxes/DiscoveryElectronics/Camera Setup/index.html +++ b/docs/Toolboxes/DiscoveryElectronics/Camera Setup/index.html @@ -10,13 +10,13 @@ - - + +

    openUC2 Camera Setup

    The openUC2 Camera Setup provides guidance on configuring and using webcams and Daheng cameras for imaging purposes within the openUC2 ecosystem. This setup allows users to seamlessly integrate cameras into their experimental setups and utilize them for imaging and data acquisition. Below are detailed instructions for setting up cameras on different platforms:

    Webcam

    On Windows

    • Users can utilize the built-in webcam functionality provided by Windows. They need to open the webcam using the Windows internal software and start streaming.

    On Mac

    • For Mac users, the Photobooth application can be used to access the webcam. Simply open the Photobooth application and select the camera to start capturing images or videos.

    Alternative Method

    • Users can also use the openUC2 Web Serial interface available at https://youseetoo.github.io/indexWebSerialTest.html to open the camera stream.

    Daheng Cameras

    On Windows

    • To use Daheng cameras on Windows, users should visit https://www.get-cameras.com/customerdownloads?submissionGuid=93704570-544a-43e8-83d6-f5f3cf0b97fb.
    • From the provided options, select the "Windows SDK USB2+USB3+GigE (including Directshow + Python) Galaxy V1.23.2305.9161" package.
    • Install the software and drivers from the downloaded package.
    • Once installed, users can start the "Galaxy Viewer" application to begin capturing images using the Daheng camera.

    On Android Phones

    • To use Daheng cameras on Android phones, users should first visit https://www.get-cameras.com/customerdownloads?submissionGuid=93704570-544a-43e8-83d6-f5f3cf0b97fb.
    • From the provided options, select the "Android USB3 SDK v1.2.2112.9201" package and download it.
    • After downloading, unzip the package and install the "GxViewer_GetRawImage.apk" on the Android phone (users may need to allow installation of apps from unknown sources or 3rd party apps).
    • Connect the Daheng camera to the Android phone using a USB-C to Daheng cable (adapter).
    • Open the installed app ("GxViewer_GetRawImage") and grant access to the USB connection when prompted.
    • Users can adjust camera settings by swiping left in the app and then proceed to capture images.

    Video Tutorial

    A video tutorial demonstrating the camera setup is available at https://youtu.be/PtdU5qE6BSc.

    The openUC2 Camera Setup provides users with easy-to-follow instructions for configuring and utilizing webcams and Daheng cameras on different platforms, enabling seamless integration into various imaging applications and experiments.

    XIAO Sense Camera

    Coming Soon.

    You can have a glimpse here https://github.com/openUC2/openUC2-SEEED-XIAO-Camera/

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryElectronics/XYZ_stage_mico/index.html b/docs/Toolboxes/DiscoveryElectronics/XYZ_stage_mico/index.html index 812b95500..5c867002a 100644 --- a/docs/Toolboxes/DiscoveryElectronics/XYZ_stage_mico/index.html +++ b/docs/Toolboxes/DiscoveryElectronics/XYZ_stage_mico/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -20,7 +20,7 @@ Fully assembled XYZ stage with high precision stepper motors, designed for seamless automation in microscopy setups.

    Key Features:

    • Mounting Flexibility: The XYZ stage can be easily mounted on top of a cube or suspended at the side, offering flexibility in integrating it into various experimental setups.

    • Interferometer and Microscopy Applications: This stage finds application in interferometers and light-sheet/fluorescence microscopes, where it plays a crucial role in precisely manipulating the sample in all directions.

    • Durable Construction: Constructed entirely from metal, the XYZ stage ensures robustness and stability during delicate experiments.

    • High Precision Stepper Motors: The stage is equipped with non-captive stepper motors, delivering exceptional precision during positioning operations.

    XYZ Stage in an Interferometer Setup Image showing two XYZ stages (one motorized and one manual stage) employed in an OCT / Michelson type interferometer.

    Setup and Integration: To assist users in setting up and integrating the XYZ stage into their experimental configurations, a comprehensive video guide is available. This instructional video can be viewed at https://www.youtube.com/embed/E_hhclFqx5g.

    For further information or inquiries regarding the openUC2 XYZ Micrometer Stage, interested parties can refer to the official openOCT project page at https://github.com/openUC2/openUC2-Hackathon-openOCTRemote. The project page contains additional details, resources, and support for utilizing the XYZ stage effectively in diverse research settings.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryElectronics/seeedmicroscope/index.html b/docs/Toolboxes/DiscoveryElectronics/seeedmicroscope/index.html index 8a5bfaaad..385bdd9ee 100644 --- a/docs/Toolboxes/DiscoveryElectronics/seeedmicroscope/index.html +++ b/docs/Toolboxes/DiscoveryElectronics/seeedmicroscope/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -55,7 +55,7 @@ Seeedmicroscope_40

    Focus sample with manual focusing stage Seeedmicroscope_41

    Using an Android APP

    For Android users, please have a look here: https://matchboxscope.github.io/docs/APP

    This app will help you connect and capture images using this microscope.

    Conclusion

    Congratulations! You have successfully assembled your modular microscope. With this microscope, you can now observe various samples and capture images using the camera connected to your smartphone or computer. This modular design allows for easy customization and experimentation, making it a versatile tool for exploring the microscopic world.

    Remember, the performance of the microscope might be affected by the modifications made to the objective lens, so adjust your expectations accordingly. Enjoy exploring and discovering the hidden wonders of the microcosmos!

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryElectronics/spectrometer/index.html b/docs/Toolboxes/DiscoveryElectronics/spectrometer/index.html index fc1c4f4b6..1222af651 100644 --- a/docs/Toolboxes/DiscoveryElectronics/spectrometer/index.html +++ b/docs/Toolboxes/DiscoveryElectronics/spectrometer/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -31,7 +31,7 @@ 8. Example Plot of a spectrum (white light)

    Operating the Spectrometer

    • Calibration: Essential for accurate measurements.
    • Usage: Can be used to analyze spectra from various light sources.

    10. Example Plot of a spectrum (red LED)

    10. Example Plot of a spectrum (green LED)

    Further Reading and Resources

    For more in-depth information on spectroscopy and related subjects, refer to resources provided by Public Lab and other scientific publications.

    Public Lab: https://publiclab.org/wiki/spectrometry

    Gaudi Lab: https://www.gaudi.ch/GaudiLabs/?page_id=825

    ESPectrometer: https://matchboxscope.github.io/docs/Variants/ESPectrometer

    Youtube: https://www.youtube.com/watch?app=desktop&v=T_goVwwxKE4&ab_channel=Les%27Lab

    Software: https://github.com/leswright1977/PySpectrometer

    Contributing and Collaboration

    This open-source project welcomes contributions from everyone. Whether you're experienced in CAD design or programming,

    or just starting out, there are many ways to contribute. Check out our CONTRIBUTING guide for more details.

    Licensing and Collaboration Notes

    This project is licensed under the CERN open hardware license. We encourage users to share their modifications and improvements. All design files are available for free, but we appreciate feedback and collaboration.

    For details on the licensing, please visit License.md.

    Note: Design files were created using Autodesk Inventor 2019 (EDUCATION).

    Stay Connected

    If you find this project beneficial, please star this repository, follow us on Twitter, and cite our webpage in your work!

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryFluorescence/index.html b/docs/Toolboxes/DiscoveryFluorescence/index.html index 40c4c441d..86cdc248d 100644 --- a/docs/Toolboxes/DiscoveryFluorescence/index.html +++ b/docs/Toolboxes/DiscoveryFluorescence/index.html @@ -10,13 +10,13 @@ - - + + - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryInterferometer/InlineHolography/index.html b/docs/Toolboxes/DiscoveryInterferometer/InlineHolography/index.html index b63f483dc..aa4eb23a8 100644 --- a/docs/Toolboxes/DiscoveryInterferometer/InlineHolography/index.html +++ b/docs/Toolboxes/DiscoveryInterferometer/InlineHolography/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -26,7 +26,7 @@ Here you will finde a guide how to setup the ImSwitch Software:

    • Download the Software package from Dropbox
    • Install Anaconda (Important: When you're asked to add Anaconda to the PATH environment, say YES!)
    • Install Arduino + all drivers
    • Install the CH340 driver
    • Extract ImSwitch.zip to /User/$USER$/Documents/ImSwitch (clone or download from GitHub)
    • Extract ImSwitchConfig.zip to /User/$USER$/Documents/ImSwitchConfig (clone or download from GitHub)
    • Optional: Install Visual Studio Code + the Python plugin => setup the Visual studio code IDE for Python

    Install ImSwitch for UC2

    • Open the anaconda command (Windows + R => "CMD" => enter)
    • Type: conda create -n imswitch
    • Wait until environment is created
    • Type: conda activate imswitch
    • Type: cd /User/$USER$/Documents/ImSwitch
    • Type: pip install -r requirements.txt
    • Type: pip install -e ./
    • Type: imswitch

    Reconstruction

    This video will show you how to reconstruct holographic data using UC2 and ImSwitch.

    https://youtu.be/CWXx0Dw-Jro

    Things to explore:

    • Get Familiar with ImSwitch
    • Get a sparse sample, e.g. plankton on a coverslip would be best, or just dust/sand/cheek cells, and try to acquire some holograms

    Refocusing using ImSwitch

    Using the In-line Holography plug-in widget in ImSwitch we can refocus the sample by using a propagator in reverse from the recorded hologram in real-time.

    The In-line holography experiment can also be produced with a laser source. In this version of the In-line holography setup, we use white light as source and we use filters to have quasi-monochromatic light illuminating the sample.

    ADDITIONAL Speech-to-text

    The first experiment will be the inline holographic microscope. This is a relatively simple experiment where we can show both the temporal and especially coherence. We will create a lensless microscope where we use an LED that is filtered by a color filter and pinhole to create a quasi one of chromatic coherent light source. This is then illuminating the transparent sample that is sparse before the scattered wave is sitting the camera sensor. This is relatively simple to build with the C2 system; for this, we only need the LED holder, a gel color filter, as it sees from theaters, aluminum foil where we'll stitch in a hole in order to create a local pinhole, some space between this created light source and the sample, and then the sample that this ultimately glued onto the sensor very closely so that the pinhole virtually scales in size as the ratio between the distance of the light source to the sample and sample to the sensor. In order to build the system, we will place the here created light source on the far left; then another empty cube follows right next to it; then another empty cube follows on the right-hand side; and then we combine the sample mount and the camera into one cube so that the distance between the sample and the camera is minimized. All these cubes should be mounted on puzzle pieces on the lower end and the upper bar so that the whole system becomes stable. We will turn on the camera and also turn on the lights source. Then we go to the web app after connecting to the camera through Wi-Fi, and then we will try to see any variation in the contrast of the camera. If the contrast is not high enough because of this scattering background light, we have to cover the system with a box or with some closing so that there's no straight lights hitting the sensor. This will make a very bad result in the reconstruction. When you're lucky, you can see the sample as a kind of shadow on the sensor already. 
The core idea now is to reconstruct this digital hologram, where we have to carefully maximize the quality of the file image. Compression artifacts from the ESP32 camera are unavoidable and will eventually degrade the final image results. What we are going to do now is to temper in image and then back propagate the distance from the sensor to the sandal plane using a numerical transformation. What this really means is that we take the image and take every pixel and back propagated by a certain distance numerically. This is done using a fast-year transform where we first fiatransform the image so that it is in frequency space; then we multiply it with a parabolic face Factor, and then we inverse full-year transform the results to end up in real space again. This becomes a convolution of the Fresnel colonel, which essentially propagates every pixel edge of certain distance depending on the wavelength and sampling rate. We can conveniently do that in Python with the Script that is provided by the Jupiter notebook. For this, we go to the website of the ESP32, hit the capture button, and download the image onto the computer. Then we start the Jupiter notebook server by opening the command line in Windows or in Linux and enter Jupiter notebook. Then we go to the browser and open the example Jupiter notebook that will reconstruct our hologram. We will enter the path of our downloaded image file and then reconstruct the results. There are several problems which we can describe but not solve at the moment for stop inland holography, as the name already says, has the problem that the light source and the scattered wave interfere in line. That means the point source will create spherical waves that are propagating its free space and will become almost a plain wave when it's the sample. 
Here some parts of the wave are scattered where which means that a plane wave is altered in its face depending on the face of the microscopic example, and some portion of the wave is an altered. That means after the sample the unchecked and scattered wave are propagating to the sensor where the two amplitudes are superposing. That means they add up for stuff since our camera detector cannot record amplitudes since the object of frequency is very very high. We are averaging out over time. That means that we will record intensity values in the end. This also means that the information about the face is getting lost. When we are reconstructing the hologram, the color will differentiate whether the sample is before or behind the sensor since the face information is that anymore. This means that in the reconstruction, the so-called twin image always overlays the real image in the end. This causes an avoidable ringing artifacts in the reconstruction. There are some ways to remove it, for example by estimating the face using iterative algorithms or model-based approaches, where we take the full image acquisition process into account. Alternatively, suit also be machine learning algorithms where an algorithm estimates the background and remove these artifacts. However, here we won't use these algorithms as we just want to learn how we can reconstruct the simple.

    Some notes on the transform that we have just used here. Briefly, it is a transformation from spatial to frequency coordinates. This sounds very abstract, but for example, our ear does this all the time. When we talk, our voice generates a vibration of the air. That means different frequencies are oscillating and add up to something like noise. Our ear, in turn, has the cochlear where many nerve cells, in turn, are oscillating depending on the resonance frequency of every cell. In a way, they are unmixing the noise and modulate the different frequencies. That means that if you're singing like an A, there is the fundamental frequency and several higher and lower harmonics. And lens does something very similar but in two dimensions. You can have optical frequencies where, for example, a grating that is having stripes that represent on and off and on and off

    at a certain distance represent periodic structure. It lens when you place something in the focal plane will then flea transform this into the demodulated frequency components. When you, for example, have a periodic structure like a grating, it will produce two pieces in its Fourier transform or in its focal length on the object side. A fast Fourier transform is its equivalent in the computational science. You can take an image and then represent it in its frequency components for stock that means it tries to estimate the sum of all the different frequency components that make up the image. We use this fast Fourier transform in our code to bring it from real space to frequency space and back again. But since we start with an image without an amplitude or without the face, lack the information.

    This property creates additional artifacts since relax the information of the face when we record intensity values on our camera, we also limited to samples that I see like just for the tomt capture in the watcher. The optical resolution of our microscope is bound to the pixel size and the opening angle or the numerical aperture that is created by the illumination and the sensor size that we use to detect the image. However, it is a very nice way of demonstrating how long profil works and how we can detect images without a lens. For stop many different have used it, for example, to detect Malaria in blood. New sins the field of view is very Deutsch.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryInterferometer/Interferometer_intro/index.html b/docs/Toolboxes/DiscoveryInterferometer/Interferometer_intro/index.html index 24a3a5941..3a57457c1 100644 --- a/docs/Toolboxes/DiscoveryInterferometer/Interferometer_intro/index.html +++ b/docs/Toolboxes/DiscoveryInterferometer/Interferometer_intro/index.html @@ -10,13 +10,13 @@ - - + +

    openUC2 Interferometer Introduction

    This is a collection of different mini-tutorials to assemble the different optical systems using UC2. First, we will introduce the setup with a brief text. Afterwards, a little video will help you assemble the device. If you have any questions, please feel free to post a question in the Forum or in the Github Issue section.

    What will you learn?

    • What's inside the box?
    • How can we start different experiments?

    What's inside the box?

    Duration: 3

    Inside the box you will find a number of different cubes, all coming with different functionalities. Below you will find a list of all modules inside the discovery kit.

    Lasers and Beamexpanders

    Duration: 3

    Lasers and Interferometers

    Duration: 3

    Microscopes

    Duration: 3

    Polarization

    Duration: 3

    Microscope with Webcam

    Duration: 3

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometer/index.html b/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometer/index.html index 8221c6d79..7d823d215 100644 --- a/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometer/index.html +++ b/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometer/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -20,7 +20,7 @@

    Step 6: Setup and alignment

    Place the camera on the sample arm as shown. Put the screen on the other arm exit. Place the sample holder using one half of the cube at a time to not collide with the microscope objective.

    Turn the laser on and use the screen to align both beams using the screws on the reference mirror.

    Step 7: Connect and adjust in the MVS app

    Connect the camera to the computer and open the MVS app. Block the reference beam. Move the coverslide such that your sample enters the FoV (Field of View). Unblock the reference beam. Zoom into the image to distinguish the fringe pattern in the MVS camera display. Adjust the angles of the reference mirror using the screws to change the fringe pattern as shown.

    Step 7: Data processing

    Process the data. Phase unwrapping possible.

    First Tests with Modifications to the Original Setup

    Using Lei code, the need of a linear stage for the sample was identified. Adjusting the objective and tube lens enhances the interference, making it crucial to use the ImSwitch interface to see the FFT in real time and optimize. The final goal is to move the position of the first order interference to use Lei algorithm (or some Phase unwrapping algorithm) to retrieve the Phase. To achieve this, two images need to be acquired: a sample image and a background image (without a cover slide or a slide region with no specimen).

    Result of Phase Unwrapping

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryInterferometer/MichelsonInterferometer/index.html b/docs/Toolboxes/DiscoveryInterferometer/MichelsonInterferometer/index.html index 43f5c8547..37b34ed97 100644 --- a/docs/Toolboxes/DiscoveryInterferometer/MichelsonInterferometer/index.html +++ b/docs/Toolboxes/DiscoveryInterferometer/MichelsonInterferometer/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -35,7 +35,7 @@

    Step 12: Adjust the camera exposure

    Adjust the exposure time of the camera. You should see a fringe pattern. Try to adjust the reference mirror screws finely to bring the center of the interference pattern to the center of the camera.

    Experimental Data

    This is the fully assembled UC2 interferometer with a green laser diode, a camera acting as a screen to digitize the interference, a beamsplitter, a kinematic mirror and a mirror that can be translated along Z.

    If you bring the two beams on top of each other, you will be able to observe the interference pattern, which in case of one beam exactly overlaying the other will be a ring pattern. These rings are also called Newton rings and come from the fact that we interfere two divergent beams, leading to a superposition of two spherical caps/waves.

    Using the ESP32 camera, we can quantify the motion of the beams and e.g. measure distances or angles.

    Conclusion

    Congratulations! You have successfully built a Michelson Interferometer using the UC2 modular microscope toolbox. This device allows you to explore the interference properties of light and perform fascinating experiments. As you move one of the arms, you will observe constructive and destructive interference patterns on the camera, demonstrating the wave-like nature of light. Have fun experimenting with different setups and learning more about the wave-particle duality of light!

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial/index.html b/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial/index.html index f80ce6d1f..f2929f664 100644 --- a/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial/index.html +++ b/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorial/index.html @@ -10,14 +10,14 @@ - - + +

    HIK-Camera Software Installation

    Install MVS App for Camera Utilization

    Camera model: MV-CE060-10UC. Visit the HIKROBOTICS website and download the MVS software suitable for your computer. Below are steps exemplifying the software installation for Mac.

    Install the downloaded file.

    Open the MVS Software.

    You should see the following window.

    Connect the camera. Refresh the USB line to detect the camera.

    Select the make-link button on the detected camera.

    The following window should be displayed.

    Click on the play button in the actions bar of the camera.

    If properly connected, you should see a real-time image. Adjust the exposure if the image is overexposed.

    To adjust the exposure time, go to the Feature tree, select the Acquisition Control Category, and change the Exposure Auto option to Continuous.

    Now, a clear image with good contrast should be visible.

    To stop recording, click on the stop button in the camera's actions bar.

    To disconnect the camera, click on the break-link button next to the detected camera in the USB devices list.

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryPhaseMicroscopy/DPCmicroscopy/index.html b/docs/Toolboxes/DiscoveryPhaseMicroscopy/DPCmicroscopy/index.html index 8f7e5a987..165c5bb76 100644 --- a/docs/Toolboxes/DiscoveryPhaseMicroscopy/DPCmicroscopy/index.html +++ b/docs/Toolboxes/DiscoveryPhaseMicroscopy/DPCmicroscopy/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -19,7 +19,7 @@

    Step 3: Build the DPC setup

    Substep 1

    Build the camera module as shown. It comprises a tube lens and a Hikrobot camera. Adjust the screw which binds the camera to the camera base plate to get the right distance between the camera and the tube lens.

    Substep 2

    Insert the non-kinematic mirror, the microscope objective in the fixed mount and the XYZ stage accordingly.

    Substep 3

    Build the illumination module, which comprises the LED array and the condenser lens, as shown.

    Substep 4

    Finally, on top of the module built in substep 2, add the illumination module.

    Step 4: Adjust the Source-sample distance

    First, adjust the distance between the LED array and the condenser lens by placing them a focal distance (f = 50 mm) apart. This assures the plane wave illumination. Then, adjust the XYZ to the central positions. Adjust the Microscope objective position so that it matches roughly the working distance.

    Step 5: Focus on the sample

    Use ImSwitch to turn on one of the central LEDs, place a test sample, and focus on it by coarsely moving the microscope objective and finely tuning the height using the XYZ stage. Once it is in focus, adjust the distance from the condenser to the sample to be the focal length (f = 50 mm). In this geometry the LED array dimensions are close to the matched illumination condition. Hence, some LEDs illuminate at the objective NA (NAi = NAobj).

    Note: If your sample is transparent be careful not to crash the sample with the microscope objective! For more information about this experimental setup look at: 3D differential phase-contrast microscopy with computational illumination using an LED array.

    Example of illuminating sample with one half circle illumination. We should be able to see the phase gradient using oblique illumination. In the figure we can compare a defocused and focused image of a cheek cells sample.

    Step 6: Run the ImDPC experiment!

    Once you have focused on the sample, adjust the desired FoV. Now you are set. Click Start on the DPC widget!

    Congrats! You have created a DPC microscope with OpenUC2!

    DPC Images

    Using the reconstruction algorithm we can retrieve the phase of the sample.

    First test with the OpenUC2-DPC setup:

    In the animation you can compare the contrast that we can get with brightfield illumination and the DPC reconstruction generated by the four images taken with the half circle illumination.

    Taking a series of DPC images at different focal planes. Cropped DPC image of Unknown cells (top) and Cheek cells (bottom) captured with 0.25 NA microscope objective with 10x magnification.

    Left:Cropped DPC image captured with 0.17 NA microscope objective with 4x magnification.

    Reconstruction algorithm (Waller-Lab)

    The reconstruction algorithm works with the development of the Weak Object Transfer Function (WOTF). Using the code implemented by Waller (Waller-Lab/DPC), we are able to reconstruct the absorption and phase of the samples. Here we explain each step and implementation of the code using Imswitch.

    We are going to revise each part of the code and understand it.

    Acquisition

    We need four images corresponding to each half-circle illumination pattern. With a good exposure time for the camera to reduce noise. In the figure we can see an example of the four captured DPC images.

    We can correct the images using flatfield correction. Flatfield correction consists of taking an image without the sample; then we take the image to be corrected and divide it by the flatfield image. This enables us to get rid of noise like dust on the camera, for instance.

    The code

    The code consists of a Jupyter notebook and one Python script.

    Python script: dpc_algorithm.py

    This script contains the core algorithm to solve the DPC problem and from the four acquired images retrieve the phase.

    import numpy as np
    from scipy.ndimage import uniform_filter
    pi = np.pi
    naxis = np.newaxis
    F = lambda x: np.fft.fft2(x)
    IF = lambda x: np.fft.ifft2(x)

    def pupilGen(fxlin, fylin, wavelength, na, na_in=0.0):
    pupil = np.array(fxlin[naxis, :]**2+fylin[:, naxis]**2 <= (na/wavelength)**2)
    if na_in != 0.0:
    pupil[fxlin[naxis, :]**2+fylin[:, naxis]**2 < (na_in/wavelength)**2] = 0.0
    return pupil

    def _genGrid(size, dx):
    xlin = np.arange(size, dtype='complex128')
    return (xlin-size//2)*dx

    class DPCSolver:
    def __init__(self, dpc_imgs, wavelength, na, na_in, pixel_size, rotation, dpc_num=4):
    self.wavelength = wavelength
    self.na = na
    self.na_in = na_in
    self.pixel_size = pixel_size
    self.dpc_num = 4
    self.rotation = rotation
    self.fxlin = np.fft.ifftshift(_genGrid(dpc_imgs.shape[-1], 1.0/dpc_imgs.shape[-1]/self.pixel_size))
    self.fylin = np.fft.ifftshift(_genGrid(dpc_imgs.shape[-2], 1.0/dpc_imgs.shape[-2]/self.pixel_size))
    self.dpc_imgs = dpc_imgs.astype('float64')
    self.normalization()
    self.pupil = pupilGen(self.fxlin, self.fylin, self.wavelength, self.na)
    self.sourceGen()
    self.WOTFGen()

    def setTikhonovRegularization(self, reg_u = 1e-6, reg_p = 1e-6):
    self.reg_u = reg_u
    self.reg_p = reg_p

    def normalization(self):
    for img in self.dpc_imgs:
    img /= uniform_filter(img, size=img.shape[0]//2)
    meanIntensity = img.mean()
    img /= meanIntensity # normalize intensity with DC term
    img -= 1.0 # subtract the DC term

    def sourceGen(self):
    self.source = []
    pupil = pupilGen(self.fxlin, self.fylin, self.wavelength, self.na, na_in=self.na_in)
    for rotIdx in range(self.dpc_num):
    self.source.append(np.zeros((self.dpc_imgs.shape[-2:])))
    rotdegree = self.rotation[rotIdx]
    if rotdegree < 180:
    self.source[-1][self.fylin[:, naxis]*np.cos(np.deg2rad(rotdegree))+1e-15>=
    self.fxlin[naxis, :]*np.sin(np.deg2rad(rotdegree))] = 1.0
    self.source[-1] *= pupil
    else:
    self.source[-1][self.fylin[:, naxis]*np.cos(np.deg2rad(rotdegree))+1e-15<
    self.fxlin[naxis, :]*np.sin(np.deg2rad(rotdegree))] = -1.0
    self.source[-1] *= pupil
    self.source[-1] += pupil
    self.source = np.asarray(self.source)

    def WOTFGen(self):
        """Compute weak-object transfer functions for every source pattern.

        For each illumination pattern, self.Hu receives the absorption
        transfer function and self.Hp the phase transfer function, each
        normalized by the total transmitted intensity of that source.
        F and IF are the forward/inverse Fourier transforms defined
        elsewhere in this module.
        """
        absorption_tfs = []
        phase_tfs = []
        for src in self.source:
            cross_spectrum = F(src*self.pupil)*F(self.pupil).conj()
            total_intensity = (src*self.pupil*self.pupil.conj()).sum()
            absorption_tfs.append(2.0*IF(cross_spectrum.real)/total_intensity)
            phase_tfs.append(2.0j*IF(1j*cross_spectrum.imag)/total_intensity)
        self.Hu = np.asarray(absorption_tfs)
        self.Hp = np.asarray(phase_tfs)

    def solve(self, xini=None, plot_verbose=False, **kwargs):
        """Recover absorption and phase via Tikhonov-regularized least squares.

        Returns one complex array per frame: real part = absorption,
        imaginary part = phase.  `xini`, `plot_verbose`, and `**kwargs`
        are accepted but unused in this implementation — TODO confirm
        whether other variants of this solver rely on them.
        Requires setTikhonovRegularization() to have set self.reg_u and
        self.reg_p beforehand.
        """
        dpc_result = []
        # 2x2 normal-equation blocks A^H A with Tikhonov damping on the diagonal.
        AHA = [(self.Hu.conj()*self.Hu).sum(axis=0)+self.reg_u, (self.Hu.conj()*self.Hp).sum(axis=0),\
               (self.Hp.conj()*self.Hu).sum(axis=0) , (self.Hp.conj()*self.Hp).sum(axis=0)+self.reg_p]
        determinant = AHA[0]*AHA[3]-AHA[1]*AHA[2]
        # Every reconstructed frame consumes dpc_num consecutive raw images.
        for frame_index in range(self.dpc_imgs.shape[0]//self.dpc_num):
            fIntensity = np.asarray([F(self.dpc_imgs[frame_index*self.dpc_num+image_index]) for image_index in range(self.dpc_num)])
            AHy = np.asarray([(self.Hu.conj()*fIntensity).sum(axis=0), (self.Hp.conj()*fIntensity).sum(axis=0)])
            # Closed-form 2x2 inverse (Cramer's rule) applied in Fourier space.
            absorption = IF((AHA[3]*AHy[0]-AHA[1]*AHy[1])/determinant).real
            phase = IF((AHA[0]*AHy[1]-AHA[2]*AHy[0])/determinant).real
            dpc_result.append(absorption+1.0j*phase)

        return np.asarray(dpc_result)

    Jupyer notebook: main_dpc.ipynb

    With this Jupyter notebook you can test the DPC reconstruction algorithm using your own images!

    Import Modules

    %load_ext autoreload
    %autoreload 2
    %matplotlib notebook
    import numpy as np
    import matplotlib.pyplot as plt
    from os import listdir
    from skimage import io
    from mpl_toolkits.axes_grid1 import make_axes_locatable
    from dpc_algorithm import DPCSolver

    Load DPC Measurements

    data_path  = "../sample_data/" #INSERT YOUR DATA PATH HERE
    image_list = listdir(data_path)
    image_list = [image_file for image_file in image_list if image_file.endswith(".tif")]
    image_list.sort()
    dpc_images = np.array([io.imread(data_path+image_list[image_index]) for image_index in range(len(image_list))])
    #plot first set of measured DPC measurements
    f, ax = plt.subplots(2, 2, sharex=True, sharey=True, figsize=(6, 6))

    for plot_index in range(4):
    plot_row = plot_index//2
    plot_col = np.mod(plot_index, 2)
    ax[plot_row, plot_col].imshow(dpc_images[plot_index], cmap="gray",\
    extent=[0, dpc_images[0].shape[-1], 0, dpc_images[0].shape[-2]])
    ax[plot_row, plot_col].axis("off")
    ax[plot_row, plot_col].set_title("DPC {:02d}".format(plot_index))
    plt.show()

    Output (example):

    Set System Parameters

    wavelength     =  0.514 #micron
    mag = 40.0
    na = 0.40 #numerical aperture
    na_in = 0.0
    pixel_size_cam = 6.5 #pixel size of camera
    dpc_num = 4 #number of DPC images captured for each absorption and phase frame
    pixel_size = pixel_size_cam/mag
    rotation = [0, 180, 90, 270] #degree

    DPC Absorption and Phase Retrieval

    Initialize DPC Solver

    dpc_solver_obj = DPCSolver(dpc_images, wavelength, na, na_in, pixel_size, rotation, dpc_num=dpc_num)

    Visualize Source Patterns

    #plot the sources
    max_na_x = max(dpc_solver_obj.fxlin.real*dpc_solver_obj.wavelength/dpc_solver_obj.na)
    min_na_x = min(dpc_solver_obj.fxlin.real*dpc_solver_obj.wavelength/dpc_solver_obj.na)
    max_na_y = max(dpc_solver_obj.fylin.real*dpc_solver_obj.wavelength/dpc_solver_obj.na)
    min_na_y = min(dpc_solver_obj.fylin.real*dpc_solver_obj.wavelength/dpc_solver_obj.na)
    f, ax = plt.subplots(2, 2, sharex=True, sharey=True, figsize=(6, 6))
    for plot_index, source in enumerate(list(dpc_solver_obj.source)):
    plot_row = plot_index//2
    plot_col = np.mod(plot_index, 2)
    ax[plot_row, plot_col].imshow(np.fft.fftshift(dpc_solver_obj.source[plot_index]),\
    cmap='gray', clim=(0,1), extent=[min_na_x, max_na_x, min_na_y, max_na_y])
    ax[plot_row, plot_col].axis("off")
    ax[plot_row, plot_col].set_title("DPC Source {:02d}".format(plot_index))
    ax[plot_row, plot_col].set_xlim(-1.2, 1.2)
    ax[plot_row, plot_col].set_ylim(-1.2, 1.2)
    ax[plot_row, plot_col].set_aspect(1)

    Output (example):

    Visualize Weak Object Transfer Functions

    #plot the transfer functions
    f, ax = plt.subplots(2, 4, sharex=True, sharey=True, figsize = (10, 4))
    for plot_index in range(ax.size):
    plot_row = plot_index//4
    plot_col = np.mod(plot_index, 4)
    divider = make_axes_locatable(ax[plot_row, plot_col])
    cax = divider.append_axes("right", size="5%", pad=0.05)
    if plot_row == 0:
    plot = ax[plot_row, plot_col].imshow(np.fft.fftshift(dpc_solver_obj.Hu[plot_col].real), cmap='jet',\
    extent=[min_na_x, max_na_x, min_na_y, max_na_y], clim=[-2., 2.])
    ax[plot_row, plot_col].set_title("Absorption WOTF {:02d}".format(plot_col))
    plt.colorbar(plot, cax=cax, ticks=[-2., 0, 2.])
    else:
    plot = ax[plot_row, plot_col].imshow(np.fft.fftshift(dpc_solver_obj.Hp[plot_col].imag), cmap='jet',\
    extent=[min_na_x, max_na_x, min_na_y, max_na_y], clim=[-.8, .8])
    ax[plot_row, plot_col].set_title("Phase WOTF {:02d}".format(plot_col))
    plt.colorbar(plot, cax=cax, ticks=[-.8, 0, .8])
    ax[plot_row, plot_col].set_xlim(-2.2, 2.2)
    ax[plot_row, plot_col].set_ylim(-2.2, 2.2)
    ax[plot_row, plot_col].axis("off")
    ax[plot_row, plot_col].set_aspect(1)

    Output (example):

    Solve DPC Least Squares Problem

    #parameters for Tikhonov regularization [absorption, phase] (need to tune this based on SNR)
    dpc_solver_obj.setTikhonovRegularization(reg_u = 1e-1, reg_p = 5e-3)
    dpc_result = dpc_solver_obj.solve()
    _, axes  = plt.subplots(1, 2, figsize=(10, 6), sharex=True, sharey=True)
    divider = make_axes_locatable(axes[0])
    cax_1 = divider.append_axes("right", size="5%", pad=0.05)
    plot = axes[0].imshow(dpc_result[0].real, clim=[-0.15, 0.02], cmap="gray", extent=[0, dpc_result[0].shape[-1], 0, dpc_result[0].shape[-2]])
    axes[0].axis("off")
    plt.colorbar(plot, cax=cax_1, ticks=[-0.15, 0.02])
    axes[0].set_title("Absorption")
    divider = make_axes_locatable(axes[1])
    cax_2 = divider.append_axes("right", size="5%", pad=0.05)
    plot = axes[1].imshow(dpc_result[0].imag, clim=[-1.0, 3.0], cmap="gray", extent=[0, dpc_result[0].shape[-1], 0, dpc_result[0].shape[-2]])
    axes[1].axis("off")
    plt.colorbar(plot, cax=cax_2, ticks=[-1.0, 3.0])
    axes[1].set_title("Phase")

    Output (example):

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryPhaseMicroscopy/index.html b/docs/Toolboxes/DiscoveryPhaseMicroscopy/index.html index 94f3fccbc..d8772b0bf 100644 --- a/docs/Toolboxes/DiscoveryPhaseMicroscopy/index.html +++ b/docs/Toolboxes/DiscoveryPhaseMicroscopy/index.html @@ -10,13 +10,13 @@ - - + + - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Brewster_Angle_Experiment/index.html b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Brewster_Angle_Experiment/index.html index 2aa29c713..a923ff3ca 100644 --- a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Brewster_Angle_Experiment/index.html +++ b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Brewster_Angle_Experiment/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -20,7 +20,7 @@ The amount of reflected light is specified by the optical properties of the reflecting surface, such as plastic sheets, glass, or highways.
    The incident angle of the incoming electromagnetic lightwave and refractive indices of media in which light travels through them have an essential role in the polarization degree of the reflected and refracted polarized light beams.

    You can see the reflection and transmission of unpolarized light with most of the incident angle (𝜃) values below.

    What is the Brewster angle?

    When the incident ray travels from a less dense medium (n1) to a higher dense medium (n2) with a critical angle (𝜃_B), the reflected ray is perfectly s-polarized in which the orientation of the electric field vectors are perpendicular to the plane of incidence. Otherwise, the refracted beam has a 90-degree polarization angle, partially p-polarized. This critical angle is called a Brewster angle or polarization angle and is represented by 𝜃_B in the scheme below.

    Brewster angle can be easily calculated using refractive indices of traveling media of light. In our experiment, we used air (n1 = 1) as the first medium in which light comes first and reflects in this part and microscope slide glass (n2 = 1.5) as the second medium, and the light transmits through. When we calculate the Brewster angle for our experiment, it equals approximately 57 degrees, and we can find the equation below.

    Parts

    Modules for this setup

    NamePropertiesPriceLink#
    4×4 BaseplateSkeleton of the System21.79 €Base-plate1
    MODULE: Polarizer CubeIt holds the linearly polarizing filter4.31 €Linear Polarizer1
    MODULE: Kinematic Microscope Slide HolderIt is used to insert the microscope slide with Brewster angle (53 degrees)3.7 €Microscope Slide Holder1
    MODULE: Laser CubeLASER source holder17.68 €Light Source 1
    EXTRA MODULE: Screen Holder CubeIt holds the Display Screen (Not Used in Practice)1 €Screen1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 67 × 5 mm Ball magnets 🢂
    • 28 x Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 3 x Screws DIN912 ISO 4762 M2×16 mm 🢂
    • Microscope Rectangular Coverslips 🢂
    • 1 × 5 mW Blue UV Laser Pointer 🢂

    Assembly

    All necessary parts to assemble a Microscope Slide Glass Holder Module are gathered in the image below:

    Results

    Brewster's angle experiment setup can be prepared easily. After printing and assembling the module parts, we try to find a critical angle or Brewster's angle as much as we can.

    In this setup, we use LASER as a light source because it is easier to observe the polarization degree changes by reflection. As a first step, LASER light beams reflect from microscope slide glass and pass through a linear polarizer. Then a piece of paper is inserted on the sample holder comb. Finally, we can observe the totally polarized reflected light.

    We should make a good alignment to find the Brewster angle as the incidence angle of the incoming light. In practice, this is hard because, when using screws, we could not set the exact incidence angle on the microscope slide. Nevertheless, we could obtain results showing almost perfectly polarized light after reflection at close to the critical angle.
    You can see the reflected laser light beam without alignment at a random incident angle above.

    In the image below, the incoming beam is reflected with almost Brewster angle, 57 degrees for microscope slide glass:

    Let's look at our almost perfectly polarized light by reflection experiment video records. The orientation direction of the linear polarizer only changes in 1st and 2nd videos, and we see that reflected light is totally polarized almost. The light is blocked, and we can see almost no light after the polarizer when the orientation of the linear polarizer is perpendicular to the polarization orientation of the reflected beam.

    New Ideas

    We are open to new ideas. Please contribute to the project freely, this place is a free country which is built by codes and machines :robot:

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Circular_Polarizer/index.html b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Circular_Polarizer/index.html index 72f48aac3..ea77cfafc 100644 --- a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Circular_Polarizer/index.html +++ b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Circular_Polarizer/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -21,7 +21,7 @@ The incoming light passes through a linear polarizer (grey UC2 cube). In the next step, new demonstrated linearly polarized light travels to the circular polarizer module (black UC2 cube). When an observer looks from the circular polarizer side, she/he can see the circularly polarized light.

    The circular polarizer filter was taken out of 3D cinema glasses, and it was inserted into a sample holder insert. This new circular polarizer filter insert was assembled with a UC2 unit block. Ta-da! The circular polarizer cube is ready for flight.

    The effect of the propagation direction of the linear polarizer can be seen in the video below. The polarization direction of light before the circular polarizer changes with turning the wheel of the linear polarizer and changing its orientation.

    New Ideas

    Dear Visitor,
    you have an opportunity to view our experiments. If you have a new idea, just open a new issue and shine our eyes with your light.
    Greetings from UC2 Team

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Crossed_Polarizers/index.html b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Crossed_Polarizers/index.html index df595bfa9..3ee15429c 100644 --- a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Crossed_Polarizers/index.html +++ b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Crossed_Polarizers/index.html @@ -10,15 +10,15 @@ - - + +

    Crossed Polarizers

    Crossed Polarizers are used to analyze the polarization of light. We use two linear polarizers, which are located perpendicular to each other. [1]

    In the experiment, the polarizing direction of the first polarizer is oriented vertically to the incoming beam, and it will allow passing only vertical electric field vectors. After the first polarizer, we have an s-polarized light wave. [2]

    The second polarizer is located horizontally to the electric field vector. It blocks the wave which passes through the first polarizer. These two polarizers should be oriented at the right angle with respect to each other. You can see the orientation of the linear filters and light polarization change during the experiment in the figure below.

    Time to build a Crossed Polarizers setup!

    Parts

    Modules for this setup

    NamePropertiesPriceLink#
    4×1 BaseplateSkeleton of the System5.47 €Base-plate1
    MODULE: Polarizer CubeIt holds the linearly polarizing filter8.62 €Linear Polarizer2
    EXTRA MODULE: Sample Holder CubeIt holds the Sample (Not Used in Practice)1.3 €Sample Holder1
    EXTRA MODULE: Screen Holder CubeIt holds the Display Screen (Not Used in Practice)1 €Screen1
    EXTRA MODULE: Flashlight Lamp CubeLight Source7.2 €Flashlight1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 16 × 5 mm Ball magnets 🢂
    • 16 x Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 6 x Screws DIN912 ISO 4762 M2×16 mm 🢂
    • NOT USED 1 × flashlight 🢂

    Assembly

    If all written modules are used in the experiment, the setup will look like:

    Results

    We printed and assembled two Linear Polarizer module parts. Then, we bought the necessary components and inserted them into cubes.
    You will find the basic version of Crossed Polarization experiment without a specific sample and additional light source below. We demonstrated the experiment with a room light.

    We can observe the direct effect of the angle between two linear polarizers in the video below. The intensity of passing light through crossed polarizers changes when the direction angle of the polarization filter changes 360 degrees.

    New Ideas

    We are open to new ideas. Please contribute to the project freely, this place is a free country which is built by codes and machines :robot:

    References

    [1] Introduction to Polarized Light. (n.d.). Nikon’s MicroscopyU. Retrieved February 15, 2021, from https://www.microscopyu.com/techniques/polarized-light/introduction-to-polarized-light
    [2] Logiurato, F. (2018). Teaching Light Polarization by Putting Art and Physics Together. The Physics Teachers, 1–5. https://arxiv.org/ftp/arxiv/papers/1803/1803.09645.pdf

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Many_Microscope_Slides_Experiment/index.html b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Many_Microscope_Slides_Experiment/index.html index a8b017700..def963377 100644 --- a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Many_Microscope_Slides_Experiment/index.html +++ b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Many_Microscope_Slides_Experiment/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -19,7 +19,7 @@ The printed and assembled Sample Holder Comb module with nine microscope glasses:

    You will find the basic version of the experiment without a Screen and Linear Polarizer module below.

    Images of the resulting experimental setup;
    side view (top) , top view (bottom)

    We can observe the direct effect of the rotational angle of the linear polarizer in the video below. Laser light travels through microscope slides and air-gap media several times and gets close to fully polarized light. We can see this effect by adding the Linear Polarizer cube.
    The intensity of the light passing through the linear polarizer changes as the direction angle of the polarization filter is rotated through 360 degrees.

    We can see the change in the two videos below. They demonstrate the experiment from two different views.

    New Ideas

    Rat-Tat! We are here to hear new ideas. Please don't shy and have a heart-to-heart talk with us. 💝

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Newtons_Rings_Experiment/index.html b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Newtons_Rings_Experiment/index.html index 1ed202f6b..50779f632 100644 --- a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Newtons_Rings_Experiment/index.html +++ b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Newtons_Rings_Experiment/index.html @@ -10,13 +10,13 @@ - - + +

    Newton's Rings Experiment

    Soap bubbles, oil slicks, or oxidized metal surfaces can create interference patterns under the white light illumination. In Newton's fringes, light reflects as a constructively or destructively interfered when the light waves travel through two surfaces. These combinations can be created using glass-air or air-glass contacts. These two interferences generate a concentric ring pattern of rainbow colours in white light illumination. In the same way, monochromatic light creates dark-light rings.

    The simplest example can be made using two well-cleaned microscope slides as interfaces. Air film is enclosed between two slides inconsistently, and irregular-coloured fringes are generated under the daily light. When the pressure on the microscope slides changes, fringes move and change.

    In the Newton's Rings Experiment, we used a convex lens whose surface is placed on an optical plane glass, a microscope glass, from its long focal length side. These two pieces are held together with non-uniform thin air film. After light illumination through these surfaces, the air gap and random pressures on the microscope slide and plano-convex lens cause the generation of irregular coloured or single-colour fringes; Newton's Rings.

    The details of Experiment Modules

    Linear Polarizer is used in this experiment to visualize the polarization change of reflected lights from two media on the interference pattern.

    Additional module design was made for combining the microscope glass slide and plano-convex lens inside one cube insert. You can see rendered image of the Newton's Rings Lens-Slide Holder Module from Inventor.

    We used a laser as a light source in the setup. During the experiment, we extended the beam size of the pen laser from 2 mm to 6 mm using a regular Beam Expander Module.

    Parts

    Modules for this setup

    NamePropertiesPriceLink#
    4×4 BaseplateSkeleton of the System21.79 €Base-plate1
    MODULE: Beam Expander CubeIt expands the laser beam size13.55 €Beam Expander1
    MODULE: Beam Splitter CubeIt splits the incoming beam and recombine them29.17 €Beam Splitter Holder1
    MODULE: Newton's Rings Slide-Lens Holder CubeIt creates Newton's Rings7.54 €Lens - Slide Holder1
    MODULE: Polarizer CubeIt holds the linearly polarizing filter4.31 €Linear Polarizer1
    MODULE: Laser CubeLASER source holder17.68 €Light Source1
    EXTRA MODULE: Screen Holder CubeIt holds the Display Screen (Not Used in Practice)1 €Screen1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 64 × 5 mm Ball magnets 🢂
    • 44 x Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 3 x Screws DIN912 ISO 4762 M2×16 mm 🢂
    • 1 x Beamsplitter Cube (Art. 2137) 🢂
    • 1 x Microscope Rectangular Coverslips 🢂
    • 1 x Plano-Convex Lens 🢂
    • 1 × 5 mW Blue UV Laser Pointer 🢂
    • 1 x iPhone 5 Lens f'=3mm (separated from an iPhone camera spare part) 🢂
    • 1 x Achromat Lens f' = 26,5 mm 🢂

    Assembly

    Results

    We started with building of UC2 Modules: design, print, assemble and be ready for testing. You can see our Beam Expander Cube on the 4x1 Baseplate below.

    Lens - Slide Holder Module is the key element for the generation of Newton's Rings. Necessary Module parts are shown in the image below.

    Assembled and Ready-To-Use module should look like ...

    The experimental procedure begins with

    • installation the Laser and Beam Expander Modules on the 4x4 Baseplate.

    • After checking the expansion of the laser beam width, Beam Splitter Cube is added to the setup.

    • One of the divided incoming beams is directed to the Newton's Rings Lens & Microscope Holder cube, and light reflects from the convex lens-plate glass combination through the beam splitter cube, then on observation screen.

    Demonstrating the experiment is much easier with a laser light source and results in visible fringes. The Newton's Fringes will vary in colour from inner to outer circles if a white light source is replaced with a laser source.

    Let's zoom in Newton's Fringes with more experiment images!

    The effect of the polarization angle change of the Linear Polarizer Filter can be seen in the video.

    New Ideas

    We are open to new ideas. Please contribute to the project freely, this place is a free country which is built by codes and machines :robot:

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Polarization_using_optically_active_solution/index.html b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Polarization_using_optically_active_solution/index.html index 96706201d..5ab6f25f0 100644 --- a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Polarization_using_optically_active_solution/index.html +++ b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Polarization_using_optically_active_solution/index.html @@ -10,14 +10,14 @@ - - + +

    Polarization Experiment using Optically Active Solution

    The optical activity was discovered by Arago in the quartz in 1811. In 1847, molecular chirality was observed by scientist Louis Pasteur. He found that natural tartaric acid is optically active, and its external crystals have isomerism and chiral morphology [1].

    Two molecules with the same chemical formula, the mirror image of the other, describe molecular chirality. These come in two varieties: dextrorotatory (rotate plane-polarized light clockwise) and levorotatory (counterclockwise).

    Sucrose is a disaccharide made of glucose and fructose and dextrorotatory, which rotates the plane-polarized light to the right. A well-known example of sucrose is table sugar produced naturally in plants. Fructose is a simple ketonic sugar and levorotatory which rotates the plane-polarized light to the left. Glucose is a simple sugar that belongs to the carbohydrate family and is dextrorotatory. The molecules of fructose and glucose are mirror images of each other. Corn syrup is one of the most commonly used sugar solutions [2].

    Two simple sugar-water solutions were prepared and used in the experiment. The first solution was produced with one cup of table sugar and one cup of water. Table sugar is sucrose and dextrorotatory, turning clockwise to the right plane-polarized light. Grape sugar is dextrorotatory and glucose, and the second solution mixes grape sugar (Traubenzucker) and water components in the same amount. It rotates the incoming light polarization state to the right, clockwise direction. However, two solutions have different polarization states at the same time because of their molecular structure and demonstrate different colors inside the crossed polarizers.

    In the image, the table sugar–water solution is shown on the left side, and the grape sugar–water solution is in the right glass.

    Parts

    Modules for this setup

    NamePropertiesPriceLink#
    4×1 BaseplateSkeleton of the System5.47 €Base-plate1
    MODULE: Polarizer CubeIt holds the linearly polarizing filter8.62 €Linear Polarizer2
    MODULE: Active Solution ChamberIt contains sugar-water solutions.7.32 €Active Solution Chambers1
    EXTRA MODULE: Flashlight Lamp CubeLight Source7.2 €Flashlight1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 24 × 5 mm Ball magnets 🢂
    • 20 x Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 6 x Screws DIN912 ISO 4762 M2×16 mm 🢂
    • 2 x metal plates
    • Microscope Rectangular Coverslips 🢂
    • NOT USED 1 × flashlight 🢂

    Assembly

    Results

    We printed and assembled two Linear Polarizer and Active Solution Chamber module parts. Then, we bought the necessary components and inserted them into cubes.
    You can see the details of the Active Solution Chamber designs.

    Two different chamber designs are shown in the image below. The left-side chamber has a container for only one active solution. In the other one, two different mixtures can be observed at the same time.

    You can find the basic version of the Polarization Using Optically Active Solution experiment with an additional flashlight source below. Depending on the conditions at the experiment location, you can add an extra light source.

    The chamber module was inserted between two linear polarizers, the Crossed Polarizers. We can observe the direct effect of the angle between two linear polarizers in the video below. The intensity of passing light through crossed polarizers changes when the direction angle of the polarization filter changes 360 degrees.

    Experimental result of two optically active solutions is shown in the video:

    • Left is the grape sugar–water solution (glucose),
    • Right is the table sugar–water solution (sucrose).

    References

    [1] Gal, J. (2017). Pasteur and the art of chirality. Nature Chemistry, 9(7), 604–605. https://doi.org/10.1038/nchem.2790

    [2] Logiurato, F. (2018). Teaching Light Polarization by Putting Art and Physics Together. The Physics Teachers, 1–5. https://arxiv.org/ftp/arxiv/papers/1803/1803.09645.pdf

    New Ideas

    We are open to new ideas. Please contribute to the project freely, this place is a free country which is built by codes and machines :robot:

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Stress_Birefringence/index.html b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Stress_Birefringence/index.html index 56151b13b..e904995a8 100644 --- a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Stress_Birefringence/index.html +++ b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Stress_Birefringence/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -19,7 +19,7 @@ Birefringence is the optical property of a material that has a dependent refractive index to polarization and propagation direction of light. These optically anisotropic materials are said to be birefringent.

    In general, birefringence is the double refraction of light. Each incoming light through the birefringent material such as calcite crystal is split by polarization into two rays; an ordinary and an extraordinary ray. These rays have different paths and polarizations.

    Common birefringent materials are;

    * best characterized birefringent materials are crystals
    * many plastics under mechanical stress such as cellophane or plastic boxes
    * many biological materials such as collagen, found in cartilage, tendon, bone, corneas, and several other areas in the body or some proteins.
    Polarized light microscopy is commonly used in biological tissue.

    Birefringence is used in many optical and medical devices. In medical applications, it can be used for the measurement of the optical nerve thickness or the diagnosis of glaucoma.

    Well then, what is the connection with polarization?

    Let's think. You ordered a new T-shirt from Amazon. You tried it and liked it. How beautiful! But wait. You can use a plastic cargo package for a polarization experiment and demonstrate the stress birefringence of a plastic sheet easily. Yesss, you can make science using 'garbage' too.
    Let's look at that more closely!

    Polarizers are frequently used to detect stress in plastics; birefringence. In this experiment, we use basic materials from our daily life as samples and see birefringence with our naked eyes. Let's collect simple objects such as plastic boxes, plastic cutlery (Image 1) or plastic packages. Even we can prepare our own birefringent object (Image 2) using a plastic punched pocket and sticky tape.

    Image 1 :

    Image 2:

    Stress Birefringence

    Stress birefringence results from stress or deformation of isotropic materials; the applied stress causes a loss of physical isotropy and generates birefringence.

    How can stress be applied? It can be applied externally, or another method can be used: a birefringent plastic object is manufactured using injection molding and then cooled, which freezes stress into the part.

    Parts

    Modules for this setup

    NamePropertiesPriceLink#
    4×1 BaseplateSkeleton of the System5.47 €Base-plate1
    MODULE: Polarizer CubeIt holds the linearly polarizing filter8.62 €Linear Polarizer2
    MODULE: Sample Holder CubeIt holds the Birefringent Samples3.47 €Sample Holder1
    EXTRA MODULE: Screen Holder CubeIt holds the Display Screen (Not Used in Practice)1 €Screen1
    EXTRA MODULE: Flashlight Lamp CubeLight Source7.2 €Flashlight1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 16 × 5 mm Ball magnets 🢂
    • 24 x Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 6 x Screws DIN912 ISO 4762 M2×16 mm 🢂
    • Birefringent Materials or Samples
    • NOT USED 1 × flashlight 🢂

    Assembly

    If all written modules are used in the experiment, the setup will look like:

    Results

    You will find the basic version of the Stress Birefringence experiment without an extra light source and sample holder below.
    A sample is placed into the Crossed Polarizers in the setup, and color patterns can be observed clearly. The polarization of a light ray is rotated after passing through a birefringent material and the amount of rotation is dependent on wavelength.

    The printed cube parts were assembled and the result images of experiences were taken for 3 different birefringent materials.

    In the 1st Experiment, we prepared our sample using a plastic punched pocket and randomly applied sticky tape on it.

    In the 2nd Experiment, we used a plastic piece as a sample for the setup.

    In the 3rd Experiment, plastic cutlery was used.

    New Ideas

    Rat-Tat! We are here to hear new ideas. Please don't shy and have a heart-to-heart talk with us. 💝

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Three_Polarizers/index.html b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Three_Polarizers/index.html index 22c817c9b..e9301cd8e 100644 --- a/docs/Toolboxes/DiscoveryPolarization/APP_POL_Three_Polarizers/index.html +++ b/docs/Toolboxes/DiscoveryPolarization/APP_POL_Three_Polarizers/index.html @@ -10,8 +10,8 @@ - - + +
    @@ -20,7 +20,7 @@ Some light will pass through three polarizers if we add a third polarizer between these two crossed polarizers.

    In the Three Linear Polarizers setup, the amount of light passing through the polarizers can be calculated by the Law of Malus, cosine-squared law.

    I : the intensity of passing light through polarizers (total amount of passing light into three polarizers setup)
    I(0) : the intensity of incoming light
    θ : the angle between the transmission axes of two polarizers

    The polarization direction of the first polarizer is oriented vertically to the incoming beam at 0 degrees. Incoming unpolarized light passes through the first polarizer (linear s-polarized). After the first polarizer, the vertically polarized light travels to the second linear polarizer, which is rotated by 45 degrees relative to the first polarizer. Then the traveling light passes through the third polarizer (linear p-polarized), oriented at 90 degrees relative to the first polarizer. Due to the orientation angle of each linear polarizer, the transmitted light intensity changes according to the Law of Malus.

    Three linear polarizers are used in the experiment, and each of them is set to a different transmission-axis angle.

    Parts

    Modules for this setup

    NamePropertiesPriceLink#
    4×1 BaseplateSkeleton of the System5.47 €Base-plate1
    MODULE: Polarizer CubeIt holds the linearly polarizing filter13.43 €Linear Polarizer3
    EXTRA MODULE: Sample Holder CubeIt holds the Sample (Not Used in Practice)1.3 €Sample Holder1
    EXTRA MODULE: Screen Holder CubeIt holds the Display Screen (Not Used in Practice)1 €Screen1
    EXTRA MODULE: Flashlight Lamp CubeLight Source7.2 €Flashlight1

    Parts to print

    Additional components

    • Check out the RESOURCES for more information!
    • 1 × Linear Polarizing Sheet 🢂
    • 16 × 5 mm Ball magnets 🢂
    • 24 × Screws DIN912 ISO 4762 - M3×12 mm 🢂
    • 9 × Screws DIN912 ISO 4762 M2×16 mm 🢂
    • NOT USED 1 × flashlight 🢂

    If all written modules are used in the experiment, the setup will look like:

    Assembly

    Results

    The basic version of the Three Polarizers experiment, without a specific sample or extra light source, is demonstrated here. You can see the experiment images below.

    The effect of the angle between two linear polarizers can be seen in the video below. The intensity of the light passing through the polarizers, as seen by the observer, changes when the wheel insert of the polarization filter is rotated by 45 degrees.

    New Ideas

    We are open to new idea sources (dad joke about the open-source project 😐). Just open a new issue and spread your idea!

    - - + + \ No newline at end of file diff --git a/docs/Toolboxes/DiscoveryPolarization/index.html b/docs/Toolboxes/DiscoveryPolarization/index.html index 916c17d8a..9e52bb4ef 100644 --- a/docs/Toolboxes/DiscoveryPolarization/index.html +++ b/docs/Toolboxes/DiscoveryPolarization/index.html @@ -10,13 +10,13 @@ - - + + - - + + \ No newline at end of file diff --git a/docs/Toolboxes/index.html b/docs/Toolboxes/index.html index 5f63de68e..eba12451d 100644 --- a/docs/Toolboxes/index.html +++ b/docs/Toolboxes/index.html @@ -10,13 +10,13 @@ - - + +

    Educational Kits

    CoreBox: Entry-Level Education Box

     - Features and Specifications
    - Assembling the CoreBox
    - Core Lens, Telescope, and Microscope

    Discovery Kit: Extension of CoreBox

     - Adding Modules to the Discovery Kit
    - Enhanced Functionality

    Interferometer Kit

     - You can build a Michelson Interferometer
    - Try enhancing it to become a Mach-Zehnder microscope
    - Ultimately, test the microscope extension and reconstruct images using holography
    -
    - - + + \ No newline at end of file diff --git a/docs/WORKSHOPS/Workshop Nigeria/index.html b/docs/WORKSHOPS/Workshop Nigeria/index.html index eafa9395d..5aa5c1e6a 100644 --- a/docs/WORKSHOPS/Workshop Nigeria/index.html +++ b/docs/WORKSHOPS/Workshop Nigeria/index.html @@ -10,13 +10,13 @@ - - + +

    UC2 Microscopy Building Workshop at BioRTC Yobe University, Nigeria

    Welcome to the UC2 Microscopy Workshop! 📷🔬

    If you've ever been curious about the fascinating world of microscopy, you're in the right place! In this workshop, we will take you on a journey through the core concepts of microscopy, starting with lenses and interferometry, where you'll learn how different waves superpose to create powerful imaging techniques.

    Our approach centers around the open-source modular toolbox, UC2. This revolutionary system is built on the idea that every optical, mechanical, or electrical component can be mounted inside a compact 50mm cube. With a wide variety of components already available in our extensive library, you'll have the flexibility to design and build your own optical setups, limited only by your creativity.

    We'll kick off the workshop with the fully lensless microscope, utilizing just an LED, spatial filter, sample, and camera sensor. As we progress, you'll upgrade to a finite corrected objective lens, improving the resolution and focusing capabilities on the camera chip. We'll explore different microscopy techniques, including directional microscopy and light sheet microscopy, where the alignment of light enhances optical resolution along the axis.

    The heart of our workshop is the UC2-produced microscope, aptly named "sub." Although basic, it is the perfect tool to grasp the fundamental concepts of microscopy. From there, the possibilities are limitless as you delve into designing and printing specific inserts to adapt the system for your experiments.

    The UC2 system was born out of a quest for a small, affordable microscope for live-cell microscopy imaging inside an incubator. As it evolved, we expanded its modularity, adding different contrast mechanisms and extensions like fluorescence and more. The success of this open-source initiative has been demonstrated through various publications, showcasing its applications in structured illumination microscopy, confocal microscopy, and beyond.

    Our mission is to bridge the gap between education and real-world applications, providing a platform where anyone, regardless of experience, can get creative with optics. We strive to make microscopy accessible and affordable for all, and we are excited to announce the birth of our company, now headquartered in Jena, as we embark on a journey to revolutionize microscopy.

    So, if you're ready to dive into the world of microscopy, join us in this workshop as we build and enhance simple microscopes, bring them to life with software and image processing, and unlock the incredible potential of UC2 and open-source hardware.

    Let's embark on this adventure together! Happy exploring! 🚀✨

    Inline Holographic Microscope:

    Simple SEEED ESP32S3 Xiao Sense-based microscope:

    Michelson Interferometer:

    Light-sheet microscope:

    - - + + \ No newline at end of file diff --git a/docs/WORKSHOPS/index.html b/docs/WORKSHOPS/index.html index c10a3c265..93767166b 100644 --- a/docs/WORKSHOPS/index.html +++ b/docs/WORKSHOPS/index.html @@ -10,13 +10,13 @@ - - + +

    openUC2 Workshops

    From time to time we try to share our knowledge in various formats. If you want to learn more about how you can have an openUC2 workshop near you, please contact us! We would be happy to introduce you to the world of open optics.

    - - + + \ No newline at end of file diff --git a/docs/intro/index.html b/docs/intro/index.html index 0a443313b..9d7015a5e 100644 --- a/docs/intro/index.html +++ b/docs/intro/index.html @@ -10,13 +10,13 @@ - - + +

    openUC2 Documentation

    Here you can find all information to enhance, repair, improve, use, communicate,.... our optical toolbox openUC2. Did not find what you were looking for? No problem. Send us a mail or write an issue in our github repository https://github.com/openUC2/UC2-GIT/issues.

    Looking for...

    TopicDescription
    Geometrical Optics [EN, DE]
    Wave Optics [EN]
    Microscopy [EN, DE]
    Telescope [EN, DE]
    Electronics (BOX) [EN]
    Electronics (PCB) [EN]
    ImSwitch [EN]
    Light-Sheet Microscopy [EN]
    STORM Microscopy [EN]
    openUC2 FiveD [EN]
    Workshops [EN]
    Production [EN]
    - - + + \ No newline at end of file diff --git a/index.html b/index.html index d855e7952..63d8c4bd8 100644 --- a/index.html +++ b/index.html @@ -10,13 +10,13 @@ - - + +

    openUC2 Documentation

    Seeing is believing. But better with the docs!

    Learning Kits (Explorer/Discovery)

    Learning Kits (Explorer/Discovery)

    Step by step guides to learn everything about optics.

    Cutting the Edge! (Investigator)

    Cutting the Edge! (Investigator)

    Get the most of your ready-to-use microscopes.

    Anything else.

    Anything else.

    Anything that is yet missing.

    - - + + \ No newline at end of file diff --git a/markdown-page/index.html b/markdown-page/index.html index 26a900b3d..aa5c311a7 100644 --- a/markdown-page/index.html +++ b/markdown-page/index.html @@ -10,13 +10,13 @@ - - + +

    Markdown page example

    You don't need React to write simple standalone pages.

    - - + + \ No newline at end of file diff --git a/search/index.html b/search/index.html index 1d724f4c6..08612add7 100644 --- a/search/index.html +++ b/search/index.html @@ -10,13 +10,13 @@ - - + +

    Search the documentation

    - - + + \ No newline at end of file diff --git a/sitemap.xml b/sitemap.xml index 59e4feb94..29d9ff539 100644 --- a/sitemap.xml +++ b/sitemap.xml @@ -1 +1 @@ -https://docs.youseetoo.org/blogweekly0.5https://docs.youseetoo.org/blog/archiveweekly0.5https://docs.youseetoo.org/blog/first-blog-postweekly0.5https://docs.youseetoo.org/blog/long-blog-postweekly0.5https://docs.youseetoo.org/blog/mdx-blog-postweekly0.5https://docs.youseetoo.org/blog/tagsweekly0.5https://docs.youseetoo.org/blog/tags/docusaurusweekly0.5https://docs.youseetoo.org/blog/tags/facebookweekly0.5https://docs.youseetoo.org/blog/tags/helloweekly0.5https://docs.youseetoo.org/blog/tags/holaweekly0.5https://docs.youseetoo.org/blog/welcomeweekly0.5https://docs.youseetoo.org/markdown-pageweekly0.5https://docs.youseetoo.org/searchweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/Encoderweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/Homeweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/INTROweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/LEDArrayweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/Motorweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/PinConfigweekly0.5https://docs.youseetoo.org/docs/Electronics/PS4-Controllerweekly0.5https://docs.youseetoo.org/docs/Electronics/UC2-ESP/Setup_Buildenvironmentweekly0.5https://docs.youseetoo.org/docs/Electronics/UC2-REST/ESP32_Messaging_Callbackweekly0.5https://docs.youseetoo.org/docs/Electronics/UC2-REST/ESP32_Motorweekly0.5https://docs.youseetoo.org/docs/Electronics/UC2-REST/INTROweekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e1weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e2weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e3weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e5weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e5.1weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e5.2weekly0.5https://docs.you
seetoo.org/docs/Electronics/uc2e6weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e7weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e8weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e9weekly0.5https://docs.youseetoo.org/docs/ImSwitch/DahengCameraweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchClientweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchConfigweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchExperimentalweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchInstallweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchInstallerweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchInstallerCondaweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchInstallUbuntuweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchInstallWindowsweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchUpdateweekly0.5https://docs.youseetoo.org/docs/introweekly0.5https://docs.youseetoo.org/docs/Investigator/FlowStopper/weekly0.5https://docs.youseetoo.org/docs/Investigator/Lightsheet/LightSheetweekly0.5https://docs.youseetoo.org/docs/Investigator/Lightsheet/LightSheet%20Sampleweekly0.5https://docs.youseetoo.org/docs/Investigator/Lightsheet/LightsheetCalibrationweekly0.5https://docs.youseetoo.org/docs/Investigator/Lightsheet/LightSheetOldweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Electronicsweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Illuminationweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Mainweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Resultsweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Softwareweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Stabilityweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/AlignLaserweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/FiveD_v1weekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/FiveD_v2weekly0.5https://docs.youseetoo.org/docs/Inves
tigator/XYZMicroscope/FiveD_v3weekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/HistoScanweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/MCTPluginweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/ROIScannerweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/SetupPhasecontrastweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/SetupTubelensweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/StageCalibrationweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/StageScanningweekly0.5https://docs.youseetoo.org/docs/Investigator/ZMicroscope/UpackZMicroscopeweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/INVESTIGATOR/ProductionXYZMicroscopeweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/APERTURESweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/BEAMSPLITTERweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/Cameraweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/Eyepieceweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/KIN_MIR_45weekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/KIN_MIR_90weekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/KIN_XY_LASERweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/LENSweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/MIR_45weekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/POLARIZER_ROTATINGweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/SAMPLE_HOLDErweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/STAGE_Z_MANUALweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/STAGE_Z_NEMAweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/TORCHweekly0.5https://docs.youseetoo.org/docs/Toolboxes/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCNweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreLensweekly0.5https://docs.youseetoo.org/docs
/Toolboxes/DiscoveryCore/ENGLISH/coreMicroscopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreTelescopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxENweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/FRENCH/CoreLensFRweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/FRENCH/coreMicroscopeFRweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/FRENCH/CoreTelescopeFRweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/FRENCH/uc2miniboxFRweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/CoreLensweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/coreMicroscopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/CoreTelescopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/uc2miniboxDEweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/Opticsintroweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/Smartphone%20Microscopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/SPANISH/core_introweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_InfOptics/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/MicroscopyCoreweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/Automation_introweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/Camera%20Setupweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/seeedmicroscopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/spectrometerweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/XYZ_stage_micoweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryFluorescence/weekly0.5http
s://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/InlineHolographyweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/Interferometer_introweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometerweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/MichelsonInterferometerweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorialweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPhaseMicroscopy/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPhaseMicroscopy/DPCmicroscopyweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Brewster_Angle_Experiment/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Circular_Polarizer/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Crossed_Polarizers/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Many_Microscope_Slides_Experiment/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Newtons_Rings_Experiment/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Polarization_using_optically_active_solution/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Stress_Birefringence/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Three_Polarizers/weekly0.5https://docs.youseetoo.org/docs/WORKSHOPS/weekly0.5https://docs.youseetoo.org/docs/WORKSHOPS/Workshop%20Nigeriaweekly0.5https://docs.youseetoo.org/weekly0.5 \ No newline at end of file 
+https://docs.youseetoo.org/blogweekly0.5https://docs.youseetoo.org/blog/archiveweekly0.5https://docs.youseetoo.org/blog/first-blog-postweekly0.5https://docs.youseetoo.org/blog/long-blog-postweekly0.5https://docs.youseetoo.org/blog/mdx-blog-postweekly0.5https://docs.youseetoo.org/blog/tagsweekly0.5https://docs.youseetoo.org/blog/tags/docusaurusweekly0.5https://docs.youseetoo.org/blog/tags/facebookweekly0.5https://docs.youseetoo.org/blog/tags/helloweekly0.5https://docs.youseetoo.org/blog/tags/holaweekly0.5https://docs.youseetoo.org/blog/welcomeweekly0.5https://docs.youseetoo.org/markdown-pageweekly0.5https://docs.youseetoo.org/searchweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/Encoderweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/Homeweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/INTROweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/LEDArrayweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/Motorweekly0.5https://docs.youseetoo.org/docs/Electronics/APIDescription/PinConfigweekly0.5https://docs.youseetoo.org/docs/Electronics/PS4-Controllerweekly0.5https://docs.youseetoo.org/docs/Electronics/UC2-ESP/Setup_Buildenvironmentweekly0.5https://docs.youseetoo.org/docs/Electronics/UC2-REST/ESP32_Messaging_Callbackweekly0.5https://docs.youseetoo.org/docs/Electronics/UC2-REST/ESP32_Motorweekly0.5https://docs.youseetoo.org/docs/Electronics/UC2-REST/INTROweekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e1weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e2weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e3weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e5weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e5.1weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e5.2weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e6weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e7weekly0.5https://docs.youseetoo.org/docs/Electronics/uc2e8weekly0.5htt
ps://docs.youseetoo.org/docs/Electronics/uc2e9weekly0.5https://docs.youseetoo.org/docs/ImSwitch/DahengCameraweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchClientweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchConfigweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchExperimentalweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchInstallweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchInstallerweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchInstallerCondaweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchInstallUbuntuweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchInstallWindowsweekly0.5https://docs.youseetoo.org/docs/ImSwitch/ImSwitchUpdateweekly0.5https://docs.youseetoo.org/docs/introweekly0.5https://docs.youseetoo.org/docs/Investigator/FlowStopper/weekly0.5https://docs.youseetoo.org/docs/Investigator/Lightsheet/LightSheetweekly0.5https://docs.youseetoo.org/docs/Investigator/Lightsheet/LightSheet%20Sampleweekly0.5https://docs.youseetoo.org/docs/Investigator/Lightsheet/LightsheetCalibrationweekly0.5https://docs.youseetoo.org/docs/Investigator/Lightsheet/LightSheetOldweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Electronicsweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Illuminationweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Mainweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Resultsweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Softwareweekly0.5https://docs.youseetoo.org/docs/Investigator/STORM/Stabilityweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/AlignLaserweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/FiveD_v1weekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/FiveD_v2weekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/FiveD_v3weekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/HistoScanweekly0.5https://docs.youseetoo.org/docs/Investigator/
XYZMicroscope/MCTPluginweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/ROIScannerweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/SetupPhasecontrastweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/SetupTubelensweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/SmartMicroscopyweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/StageCalibrationweekly0.5https://docs.youseetoo.org/docs/Investigator/XYZMicroscope/StageScanningweekly0.5https://docs.youseetoo.org/docs/Investigator/ZMicroscope/UpackZMicroscopeweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/INVESTIGATOR/ProductionXYZMicroscopeweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/APERTURESweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/BEAMSPLITTERweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/Cameraweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/Eyepieceweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/KIN_MIR_45weekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/KIN_MIR_90weekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/KIN_XY_LASERweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/LENSweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/MIR_45weekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/POLARIZER_ROTATINGweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/SAMPLE_HOLDErweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/STAGE_Z_MANUALweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/STAGE_Z_NEMAweekly0.5https://docs.youseetoo.org/docs/PRODUCTION/Modules/TORCHweekly0.5https://docs.youseetoo.org/docs/Toolboxes/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/CHINESE/uc2miniboxCNweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreLensweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/ENGLISH/coreMicroscopeweekly0.5https://docs.youseetoo
.org/docs/Toolboxes/DiscoveryCore/ENGLISH/CoreTelescopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/ENGLISH/uc2miniboxENweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/FRENCH/CoreLensFRweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/FRENCH/coreMicroscopeFRweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/FRENCH/CoreTelescopeFRweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/FRENCH/uc2miniboxFRweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/CoreLensweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/coreMicroscopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/CoreTelescopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/GERMAN/uc2miniboxDEweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/Opticsintroweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/Smartphone%20Microscopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryCore/SPANISH/core_introweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_FinOptics/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/ALIGNMENT_InfOptics/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryDiffraction/MicroscopyCoreweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/Automation_introweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/Camera%20Setupweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/seeedmicroscopeweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/spectrometerweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryElectronics/XYZ_stage_micoweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryFluorescence/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/InlineHolography
weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/Interferometer_introweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/MachZenderInterferometerweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/MichelsonInterferometerweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryInterferometer/SoftwareTutorialweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPhaseMicroscopy/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPhaseMicroscopy/DPCmicroscopyweekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Brewster_Angle_Experiment/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Circular_Polarizer/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Crossed_Polarizers/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Many_Microscope_Slides_Experiment/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Newtons_Rings_Experiment/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Polarization_using_optically_active_solution/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Stress_Birefringence/weekly0.5https://docs.youseetoo.org/docs/Toolboxes/DiscoveryPolarization/APP_POL_Three_Polarizers/weekly0.5https://docs.youseetoo.org/docs/WORKSHOPS/weekly0.5https://docs.youseetoo.org/docs/WORKSHOPS/Workshop%20Nigeriaweekly0.5https://docs.youseetoo.org/weekly0.5 \ No newline at end of file