From afc18ac50330329264ab64864e1e817391cff5e6 Mon Sep 17 00:00:00 2001 From: Dan Bohus Date: Tue, 8 Feb 2022 22:18:44 +0000 Subject: [PATCH] Release 0.16.92.1 --- Directory.Build.props | 2 +- Psi.sln | 48 + .../Microsoft.Psi.Audio.Linux/AudioCapture.cs | 2 +- .../MFResampler.cs | 24 +- .../Microsoft.Psi.Audio.Windows/Operators.cs | 2 - .../WasapiCapture.cs | 48 +- .../WasapiRender.cs | 55 +- .../Microsoft.Psi.Audio/WaveFileHelper.cs | 12 +- .../WaveStreamSampleSource.cs | 92 ++ .../Test.Psi.Audio/Test.Psi.Audio.csproj | 2 +- .../CalibrationExtensions.cs | 434 ++++++- .../CameraIntrinsics.cs | 163 ++- .../DepthDeviceCalibrationInfo.cs | 12 +- .../DepthPixelSemantics.cs | 23 + .../ICameraIntrinsics.cs | 55 +- .../IDepthDeviceCalibrationInfo.cs | 12 +- .../Microsoft.Psi.Calibration.csproj | 1 + .../Test.Psi.Calibration/DistortionTests.cs | 23 +- .../Test.Psi.Calibration.csproj | 2 +- .../Annotations/Annotation.cs | 19 - .../Annotations/AnnotationAttributeSchema.cs | 49 + .../Annotations/AnnotationDefinition.cs | 153 --- .../Annotations/AnnotationSchema.cs | 199 ++++ .../Annotations/AnnotationSchemaDefinition.cs | 32 - .../AnnotationSchemaValueMetadata.cs | 38 - .../Annotations/AnnotationSchema{T}.cs | 75 -- .../Annotations/AnnotationValueSchema{T}.cs | 59 + .../Annotations/AnnotationValue{T}.cs | 45 + .../EnumerableAnnotationValueSchema{T}.cs | 61 + .../FiniteAnnotationSchemaValue{T}.cs | 33 - .../Annotations/FiniteAnnotationSchema{T}.cs | 66 -- .../Annotations/IAnnotationSchema.cs | 21 - .../Annotations/IAnnotationValue.cs | 28 + .../Annotations/IAnnotationValueSchema.cs | 31 + .../IEnumerableAnnotationValueSchema.cs | 20 + .../Annotations/Operators.cs | 156 +++ .../StringAnnotationValueSchema.cs | 27 + .../Annotations/TimeIntervalAnnotation.cs | 22 +- .../Annotations/TimeIntervalAnnotationSet.cs | 85 ++ .../Microsoft.Psi.Data/BatchProcessingTask.cs | 38 + .../BatchProcessingTaskAttribute.cs | 8 +- .../BatchProcessingTaskConfiguration.cs | 117 ++ .../BatchProcessingTaskMetadata.cs | 105 ++ .../BatchProcessingTaskOperators.cs | 41 + .../IBatchProcessingTask.cs | 26 + .../Data/Microsoft.Psi.Data/Json/JsonStore.cs | 4 +- .../Microsoft.Psi.Data}/ObservableObject.cs | 2 +- .../Microsoft.Psi.Data/SessionImporter.cs | 8 +- .../Data/Test.Psi.Data/Test.Psi.Data.csproj | 2 +- .../ImagingOperators.cs | 2 +- .../Microsoft.Psi.Imaging.Linux.csproj | 2 +- .../ImageFromStreamDecoder.cs | 36 +- .../ImageToGZipStreamEncoder.cs | 26 + .../ImagingOperators.cs | 13 +- .../Microsoft.Psi.Imaging.Windows.csproj | 2 + .../Microsoft.Psi.Imaging/DepthImage.cs | 32 +- .../DepthImageDecoder.cs | 2 +- .../DepthImageEncoder.cs | 2 +- .../EncodedDepthImage.cs | 12 + .../Microsoft.Psi.Imaging/EncodedImage.cs | 17 + .../Imaging/Microsoft.Psi.Imaging/Image.cs | 42 +- .../Microsoft.Psi.Imaging/ImageBase.cs | 26 + .../Microsoft.Psi.Imaging/ImageDecoder.cs | 2 +- .../Microsoft.Psi.Imaging/ImageEncoder.cs | 2 +- .../Microsoft.Psi.Imaging/ImageExtensions.cs | 582 +++++++--- .../Microsoft.Psi.Imaging/StreamOperators.cs | 215 +++- .../Microsoft.Psi.Imaging/ToPixelFormat.cs | 51 - .../Test.Psi.Imaging.Windows/ImageTester.cs | 127 +- .../Properties/AssemblyInfo.cs | 6 +- ...icrosoft.Psi.CognitiveServices.Face.csproj | 29 +- .../PersonGroupTasks.cs | 1 + ...Test.Psi.CognitiveServices.Language.csproj | 2 +- .../Test.Psi.CognitiveServices.Speech.csproj | 2 +- ...crosoft.Psi.MicrosoftSpeech.Windows.csproj | 2 +- .../MicrosoftSpeech.cs | 2 + .../MicrosoftSpeechIntentDetector.cs | 3 +- ...rosoftSpeechIntentDetectorConfiguration.cs | 3 +- 
.../MicrosoftSpeechRecognizer.cs | 5 +- .../MicrosoftSpeechRecognizerConfiguration.cs | 2 + .../README.md | 2 + .../ImageNet/ImageNetModelOutputParser.cs | 40 +- .../ImageNet/ImageNetModelRunner.cs | 7 +- .../Microsoft.Psi.Onnx.Cpu.csproj | 2 +- .../Microsoft.Psi.Onnx.Gpu.csproj | 4 +- .../Onnx/Test.Psi.Onnx/Test.Psi.Onnx.csproj | 2 +- .../Microsoft.Psi.ROS.fsproj | 3 +- .../AzureKinectBodyListVisualizationObject.cs | 2 +- .../AzureKinectBodyVisualizationObject.cs | 275 ++--- .../AzureKinectBodyTracker.cs | 8 +- .../AzureKinectCore.cs | 3 - .../AzureKinectSensor.cs | 2 +- .../Microsoft.Psi.AzureKinect.x64.csproj | 2 +- .../KinectFaceDetector.cs | 4 +- .../KinectBodyListVisualizationObject.cs | 3 +- .../KinectBodyVisualizationObject.cs | 2 +- .../KinectInternalCalibration.cs | 492 +------- .../KinectSensor.cs | 9 +- .../Test.Psi.Kinect.Windows.x64/Mesh.cs | 556 +++++---- .../MediaCapture.cs | 8 +- .../AssemblyInfo.cpp | 6 +- .../AssemblyInfo.rc | Bin 5104 -> 5104 bytes .../MP4Writer.cpp | 14 +- .../HoloLens2ResearchMode.def | 3 + .../HoloLens2ResearchMode.vcxproj | 173 +++ .../HoloLens2ResearchMode.vcxproj.filters | 48 + .../HoloLens2ResearchMode/PropertySheet.props | 16 + .../HoloLens2ResearchMode/Readme.md | 14 + .../ResearchModeAccelFrame.cpp | 39 + .../ResearchModeAccelFrame.h | 21 + .../HoloLens2ResearchMode/ResearchModeApi.h | 236 ++++ .../ResearchModeCameraSensor.cpp | 88 ++ .../ResearchModeCameraSensor.h | 27 + .../ResearchModeGyroFrame.cpp | 39 + .../ResearchModeGyroFrame.h | 21 + .../ResearchModeImuSensor.cpp | 60 + .../ResearchModeImuSensor.h | 23 + .../ResearchModeMagFrame.cpp | 39 + .../ResearchModeMagFrame.h | 21 + .../ResearchModeSensorDepthFrame.cpp | 59 + .../ResearchModeSensorDepthFrame.h | 22 + .../ResearchModeSensorDevice.cpp | 167 +++ .../ResearchModeSensorDevice.h | 32 + .../ResearchModeSensorDevice.idl | 173 +++ .../ResearchModeSensorVlcFrame.cpp | 57 + .../ResearchModeSensorVlcFrame.h | 22 + .../HoloLens2ResearchMode/packages.config | 4 + .../HoloLens2ResearchMode/pch.cpp | 4 + .../MixedReality/HoloLens2ResearchMode/pch.h | 9 + .../Accelerometer.cs | 34 + .../DepthCamera.cs | 144 +++ .../DepthCameraConfiguration.cs | 50 + .../Gyroscope.cs | 33 + .../ImageToGzipStreamEncoder.cs | 26 + .../ImageToJpegStreamEncoder.cs | 49 + .../Magnetometer.cs | 33 + ...t.Psi.MixedReality.UniversalWindows.csproj | 158 +++ .../MixedReality.cs | 124 ++ .../MixedRealityCapturePerspective.cs | 22 + .../MixedRealityCaptureVideoEffect.cs | 61 + .../Operators.cs | 42 + .../PhotoVideoCamera.cs | 541 +++++++++ .../PhotoVideoCameraConfiguration.cs | 91 ++ .../Properties/AssemblyInfo.cs | 33 + ...t.Psi.MixedReality.UniversalWindows.rd.xml | 33 + .../ResearchModeCamera.cs | 321 +++++ .../ResearchModeImu.cs | 145 +++ .../SceneUnderstanding.cs | 243 ++++ .../SceneUnderstandingConfiguration.cs | 46 + .../SpatialAnchorHelper.cs | 147 +++ .../SpatialAnchorsSource.cs | 56 + .../UnsafeNative.cs | 30 + .../VisibleLightCamera.cs | 102 ++ .../VisibleLightCameraConfiguration.cs | 51 + .../stylecop.json | 16 + .../HandVisualizationObject.cs | 27 + .../HandVisualizationObjectAdapter.cs | 73 ++ ....MixedReality.Visualization.Windows.csproj | 51 + .../stylecop.json | 16 + .../CalibrationPointsMap.cs | 46 + .../Microsoft.Psi.MixedReality/EyesSensor.cs | 74 ++ .../Microsoft.Psi.MixedReality/Hand.cs | 99 ++ .../HandJointIndex.cs | 146 +++ .../Microsoft.Psi.MixedReality/Handle.cs | 72 ++ .../Microsoft.Psi.MixedReality/HandsSensor.cs | 107 ++ .../Microsoft.Psi.MixedReality/HeadSensor.cs | 65 ++ 
.../Microsoft.Psi.MixedReality/Microphone.cs | 93 ++ .../MicrophoneConfiguration.cs | 25 + .../Microsoft.Psi.MixedReality.csproj | 43 + .../Microsoft.Psi.MixedReality/Operators.cs | 271 +++++ .../Renderers/Mesh3DListStereoKitRenderer.cs | 76 ++ .../Renderers/MeshStereoKitRenderer.cs | 65 ++ .../Renderers/ModelBasedStereoKitRenderer.cs | 176 +++ .../Rectangle3DListStereoKitRenderer.cs | 89 ++ .../SceneObjectCollection.cs | 139 +++ .../SpatialSound.cs | 115 ++ .../StereoKitComponent.cs | 54 + .../StereoKitTransforms.cs | 23 + .../Microsoft.Psi.MixedReality/stylecop.json | 16 + .../Properties/AssemblyInfo.cs | 6 +- .../AssemblyInfo.cpp | 6 +- .../Format/MessagePackFormat.cs | 4 +- .../Microsoft.Psi.Interop.csproj | 4 + .../Runtime/Microsoft.Psi.Interop/Readme.md | 6 +- .../Rendezvous/Operators.cs | 126 ++ .../Rendezvous/Readme.md | 5 + .../Rendezvous/Rendezvous.cs | 402 +++++++ .../Rendezvous/RendezvousClient.cs | 191 +++ .../Rendezvous/RendezvousClient.py | 209 ++++ .../Rendezvous/RendezvousRelay.cs | 210 ++++ .../Rendezvous/RendezvousServer.cs | 198 ++++ .../Serialization/Format{T}.cs | 97 ++ .../Serialization/IFormatDeserializer.cs | 2 +- .../Serialization/IFormatSerializer.cs | 2 +- .../Serialization/PersistentFormat.cs | 64 + .../Transport/FileSource.cs | 2 +- .../Transport/FileWriter.cs | 2 +- .../Transport/NetMQSource.cs | 12 +- .../Transport/NetMQWriter.cs | 20 +- .../Transport/NetMQWriter{T}.cs | 2 +- .../Microsoft.Psi.Interop/Transport/Readme.md | 4 +- .../Transport/TcpSource.cs | 161 +++ .../Transport/TcpWriter.cs | 106 ++ .../Microsoft.Psi/Common/DeliveryPolicy{T}.cs | 2 +- .../Common/Interpolators/Interpolator{T}.cs | 26 + .../ReproducibleInterpolator{T}.cs | 12 +- .../Runtime/Microsoft.Psi/Common/Platform.cs | 88 +- .../Microsoft.Psi/Common/RuntimeInfo.cs | 2 +- .../Runtime/Microsoft.Psi/Common/Shared.cs | 18 +- .../Microsoft.Psi/Common/SharedContainer.cs | 3 + .../Microsoft.Psi/Common/UnmanagedArray.cs | 3 + .../Microsoft.Psi/Common/UnmanagedBuffer.cs | 3 + .../Microsoft.Psi/Components/Aggregator.cs | 2 +- .../Components/AsyncConsumerProducer.cs | 2 +- .../Components/ConsumerProducer.cs | 2 +- .../Microsoft.Psi/Components/EventSource.cs | 2 +- .../Runtime/Microsoft.Psi/Components/Fuse.cs | 2 +- .../Microsoft.Psi/Components/Generator.cs | 2 +- .../Microsoft.Psi/Components/Generator{T}.cs | 4 +- .../Runtime/Microsoft.Psi/Components/Join.cs | 2 +- .../Join{TPrimary,TSecondary,TOut}.cs | 6 +- .../Runtime/Microsoft.Psi/Components/Merge.cs | 2 +- .../Microsoft.Psi/Components/Merger.cs | 2 +- .../Runtime/Microsoft.Psi/Components/Pair.cs | 4 +- .../Components/ParallelFixedLength.cs | 4 +- .../Components/ParallelSparseDo.cs | 2 +- .../Components/ParallelSparseSelect.cs | 16 +- .../Components/ParallelVariableLength.cs | 8 +- .../Microsoft.Psi/Components/Processor.cs | 11 +- .../Components/RelativeIndexWindow.cs | 2 +- .../Components/RelativeTimeWindow.cs | 2 +- .../Components/SerializerComponent.cs | 2 +- .../Components/SimpleConsumer.cs | 2 +- .../Microsoft.Psi/Components/Splitter.cs | 2 +- .../Microsoft.Psi/Components/Timer{TOut}.cs | 2 +- .../Runtime/Microsoft.Psi/Components/Zip.cs | 2 +- .../Microsoft.Psi/Connectors/Connector.cs | 15 +- .../Microsoft.Psi/Connectors/IConnector.cs | 6 +- .../Runtime/Microsoft.Psi/Data/Exporter.cs | 17 +- .../Runtime/Microsoft.Psi/Data/Importer.cs | 2 +- .../Runtime/Microsoft.Psi/Data/PsiStore.cs | 6 +- .../Data/PsiStoreStreamReader.cs | 12 +- .../Diagnostics/DiagnosticsCollector.cs | 20 +- .../Diagnostics/DiagnosticsSampler.cs | 2 +- 
.../Diagnostics/PipelineDiagnostics.cs | 222 +++- .../Microsoft.Psi/Executive/Pipeline.cs | 9 +- .../Microsoft.Psi/Executive/Subpipeline.cs | 16 + .../Microsoft.Psi/Operators/Connectors.cs | 30 +- .../Runtime/Microsoft.Psi/Operators/Fuses.cs | 119 +- .../Microsoft.Psi/Operators/Generators.cs | 18 +- .../Runtime/Microsoft.Psi/Operators/Joins.cs | 220 ++-- .../Microsoft.Psi/Operators/Pickers.cs | 85 +- .../Runtime/Microsoft.Psi/Operators/Time.cs | 20 +- .../Runtime/Microsoft.Psi/Operators/Timers.cs | 4 +- .../Persistence/InfiniteFileReader.cs | 12 +- .../Persistence/InfiniteFileWriter.cs | 46 +- .../Persistence/MessageReader.cs | 2 - .../Persistence/MessageWriter.cs | 4 - .../Persistence/MetadataCache.cs | 2 +- .../Persistence/PageIndexCache.cs | 4 +- .../Persistence/PsiStoreCommon.cs | 6 + .../Persistence/PsiStoreMonitor.cs | 161 +++ .../Persistence/PsiStoreReader.cs | 25 +- .../Persistence/PsiStoreWriter.cs | 104 +- .../Remoting/RemoteClockExporter.cs | 110 ++ .../Remoting/RemoteClockImporter.cs | 135 +++ .../Microsoft.Psi/Remoting/RemoteExporter.cs | 150 +-- .../Microsoft.Psi/Remoting/RemoteImporter.cs | 52 +- .../Serialization/ArraySerializer.cs | 5 +- .../Serialization/BufferSerializer.cs | 3 + .../Serialization/ByteArraySerializer.cs | 3 + .../Serialization/ClassSerializer.cs | 3 + .../Serialization/EnumerableSerializer.cs | 5 +- .../Serialization/ISerializer.cs | 5 + .../Serialization/ImmutableSerializer.cs | 3 + .../Serialization/KnownSerializers.cs | 10 +- .../Serialization/MemoryStreamSerializer.cs | 6 + .../Serialization/NonSerializer.cs | 10 + .../Microsoft.Psi/Serialization/RefHandler.cs | 4 + .../Serialization/SimpleArraySerializer.cs | 3 + .../Serialization/SimpleSerializer.cs | 3 + .../Serialization/StringArraySerializer.cs | 3 + .../Serialization/StringSerializer.cs | 3 + .../Serialization/StructHandler.cs | 4 + .../Serialization/StructSerializer.cs | 3 + .../Microsoft.Psi/Streams/Emitter{T}.cs | 32 +- .../Test.Psi/CustomSerializationTester.cs | 2 + Sources/Runtime/Test.Psi/EmitterTester.cs | 72 ++ Sources/Runtime/Test.Psi/FunctionalTests.cs | 2 + Sources/Runtime/Test.Psi/JoinTests.cs | 246 ++-- Sources/Runtime/Test.Psi/OperatorTests.cs | 37 +- Sources/Runtime/Test.Psi/PairTests.cs | 10 +- Sources/Runtime/Test.Psi/PersistenceTest.cs | 9 +- .../Runtime/Test.Psi/SerializationTester.cs | 2 + Sources/Runtime/Test.Psi/Test.Psi.csproj | 2 +- .../Adapters.cs | 169 +++ .../Box3DListVisualizationObject.cs | 16 + .../Box3DVisualizationObject.cs | 317 +++++ ...ImageRectangle3DListVisualizationObject.cs | 16 + ...epthImageRectangle3DVisualizationObject.cs | 398 +++++++ .../ImageRectangle3DVisualizationObject.cs | 257 ++++ ...LinearVelocity3DListVisualizationObject.cs | 16 + .../LinearVelocity3DVisualizationObject.cs | 105 ++ .../Mesh3DListVisualizationObject.cs | 16 + .../Mesh3DVisualizationObject.cs | 124 ++ ...ial.Euclidean.Visualization.Windows.csproj | 47 + .../NumericalVoxelGridVisualizationObject.cs | 80 ++ .../PointCloud3DVisualizationObject.cs | 118 ++ .../Rectangle3DListVisualizationObject.cs | 16 + .../Rectangle3DVisualizationObject.cs | 277 +++++ .../VoxelGridVisualizationObject.cs | 82 ++ .../stylecop.json | 16 + .../AngularVelocity3D.cs | 131 +++ .../Bounds3D.cs | 160 +++ .../Microsoft.Psi.Spatial.Euclidean/Box3D.cs | 363 ++++++ .../CoordinateSystemVelocity3D.cs | 135 +++ .../Images/DepthImageRectangle3D.cs | 173 +++ .../Images/EncodedDepthImageRectangle3D.cs | 88 ++ .../Images/EncodedImageRectangle3D.cs | 88 ++ .../Images/ImageRectangle3D.cs | 199 ++++ 
.../Images/Operators.cs | 106 ++ .../LinearVelocity3D.cs | 101 ++ .../Microsoft.Psi.Spatial.Euclidean/Mesh3D.cs | 34 + .../Microsoft.Psi.Spatial.Euclidean.csproj | 46 + .../Operators.cs | 116 ++ .../PointCloud3D.cs | 364 ++++++ .../Rectangle3D.cs | 232 ++++ .../Microsoft.Psi.Spatial.Euclidean/Voxel.cs | 65 ++ .../VoxelGrid.cs | 148 +++ .../stylecop.json | 16 + .../SystemSpeechRecognizer.cs | 2 +- .../Tools/PsiStoreTool/PsiStoreTool.csproj | 1 + Sources/Tools/PsiStoreTool/Utility.cs | 2 +- .../Microsoft.Psi.PsiStudio/MainWindow.xaml | 55 +- .../MainWindow.xaml.cs | 12 +- .../MainWindowViewModel.cs | 890 +++++--------- .../PsiStudioSettings.cs | 29 +- .../Windows/LayoutNameWindow.xaml.cs | 14 +- .../Microsoft.Psi.PsiStudio/app.config | 12 + .../GraphViewer.cs | 4 +- .../CartesianChartVisualizationObject.cs | 124 +- .../XYPlotVisualizationObject.cs | 19 + .../Adapters/BoolToDoubleAdapter.cs | 18 - .../Adapters/InterfaceAdapter.cs | 21 + .../MathNetPoint2DToScatterPlotAdapter.cs | 22 + ...NullablePoint2DListToScatterPlotAdapter.cs | 23 + .../Adapters/NullableRectangleAdapter.cs | 23 + .../BatchProcessingTaskMetadata.cs | 91 -- .../Common/Axis.cs | 90 +- .../Common/ContextMenuName.cs | 5 + .../Common/DisplayImage.cs | 8 +- .../Common/FrameCounter.cs | 4 +- .../Common/TimelinePanelMousePosition.cs | 35 + .../Common/TimelineValueThreshold.cs | 81 ++ .../Common/ValueRange.cs | 49 + .../Data/DataManager.cs | 212 +--- .../Data/DataStoreReader.cs | 47 +- .../Data/IStreamDataProvider.cs | 10 + .../Data/IStreamIntervalProvider.cs | 35 +- .../Data/IStreamSummary.cs | 1 - .../Data/IStreamValueProvider.cs | 6 +- .../Data/IStreamValuePublisher{TSource}.cs | 8 +- .../Data/StreamDataProvider{T}.cs | 19 +- .../Data/StreamIntervalProvider.cs | 180 ++- .../Data/StreamSource.cs | 2 +- .../Data/StreamSummaryManager.cs | 83 +- .../StreamSummary{TSource,TDestination}.cs | 3 +- .../Data/StreamUpdateWithView{T}.cs | 1 - .../Data/StreamUpdate{T}.cs | 2 - .../Data/StreamValueProvider{TSource}.cs | 19 +- ...eamValuePublisher{TSource,TDestination}.cs | 14 +- .../DataTypes/Graph.cs | 37 + .../Helpers/DateTimeFormatHelper.cs | 19 +- .../Helpers/SizeFormatHelper.cs | 61 +- .../Helpers/TimeSpanFormatHelper.cs | 67 ++ .../Icons/close-panel.png | Bin 1478 -> 0 bytes .../Icons/layout-clear.png | Bin 1610 -> 1610 bytes .../Icons/layout-delete.png | Bin 0 -> 1610 bytes .../Icons/panel-2d.png | Bin 1496 -> 1496 bytes .../panel-showhide-smallbutton-faded.png | Bin 0 -> 1583 bytes .../Icons/panel-showhide-smallbutton.png | Bin 0 -> 1583 bytes .../Icons/panel-xy.png | Bin 0 -> 1496 bytes .../Icons/{panel-3d.png => panel-xyz.png} | Bin .../Icons/remove-smallbutton-faded.png | Bin 0 -> 1478 bytes ...Microsoft.Psi.Visualization.Windows.csproj | 34 +- .../Navigation/Navigator.cs | 63 +- .../Navigation/NavigatorRange.cs | 22 +- .../ObservableDataCollection{T}.cs | 2 +- .../ObservableDataItem{T}.cs | 4 +- .../ObservableKeyedCache{TKey,TItem}.cs | 2 +- .../ObservableSortedCollection{T}.cs | 15 +- .../ObservableTreeNodeObject.cs | 1 + .../PluginMap.cs | 127 +- .../Numeric/BoolRangeSummarizer.cs | 22 + .../Numeric/BoolSeriesRangeSummarizer.cs | 23 + .../Numeric/NullableBoolRangeSummarizer.cs | 22 + .../NullableBoolSeriesRangeSummarizer.cs | 23 + .../ViewModels/DatasetViewModel.cs | 106 +- .../ViewModels/PartitionViewModel.cs | 44 +- .../ViewModels/SessionViewModel.cs | 42 +- .../ViewModels/StreamContainerTreeNode.cs | 40 +- .../ViewModels/StreamTreeNode.cs | 81 +- .../Views/CanvasVisualizationPanelView.xaml | 105 ++ 
.../CanvasVisualizationPanelView.xaml.cs | 103 ++ .../Views/IContextMenuItemsSource.cs | 1 - .../InstantVisualizationContainerView.xaml | 20 +- .../InstantVisualizationContainerView.xaml.cs | 62 +- ...tantVisualizationPlaceholderPanelView.xaml | 2 +- .../Views/NavigatorView.xaml.cs | 29 +- .../StreamVisualizationObjectCanvasView.cs | 6 +- .../Views/TimelineView.xaml.cs | 2 - .../Views/TimelineVisualizationPanelView.xaml | 64 +- .../TimelineVisualizationPanelView.xaml.cs | 74 +- .../Views/VisualizationContainerView.xaml | 1 + .../Views/VisualizationContainerView.xaml.cs | 91 +- .../Views/VisualizationPanelView.cs | 34 +- .../AudioVisualizationObjectView.xaml.cs | 1 - .../DepthImageVisualizationObjectView.xaml | 36 +- .../DepthImageVisualizationObjectView.xaml.cs | 66 +- .../DepthImageVisualizationObjectViewBase.cs | 15 + ...DiagnosticsVisualizationObjectView.xaml.cs | 6 +- ...pelineDiagnosticsVisualizationPresenter.cs | 105 +- .../IFormattableVisualizationObjectView.xaml | 35 + ...FormattableVisualizationObjectView.xaml.cs | 19 + .../ImageVisualizationObjectView.xaml | 21 +- .../ImageVisualizationObjectView.xaml.cs | 26 +- .../ImageVisualizationObjectViewBase.cs | 15 + ...leListVisualizationObjectCanvasItemView.cs | 2 +- .../BoolSeriesVisualizationObjectView.xaml | 30 + .../BoolSeriesVisualizationObjectView.xaml.cs | 22 + .../BoolVisualizationObjectView.xaml | 30 + .../BoolVisualizationObjectView.xaml.cs | 22 + ...ableBoolSeriesVisualizationObjectView.xaml | 27 + ...eBoolSeriesVisualizationObjectView.xaml.cs | 20 + ...leBoolSeriesVisualizationObjectViewBase.cs | 14 + .../NullableBoolVisualizationObjectView.xaml | 27 + ...ullableBoolVisualizationObjectView.xaml.cs | 20 + ...NullableBoolVisualizationObjectViewBase.cs | 14 + ...cimalSeriesVisualizationObjectView.xaml.cs | 2 - ...oubleSeriesVisualizationObjectView.xaml.cs | 2 - ...FloatSeriesVisualizationObjectView.xaml.cs | 2 - ...leIntSeriesVisualizationObjectView.xaml.cs | 2 - ...eLongSeriesVisualizationObjectView.xaml.cs | 2 - .../PlotVisualizationObjectView.cs | 4 +- .../TextVisualizationObjectView.xaml | 19 +- ...otationTrackVisualizationObjectViewItem.cs | 81 ++ ...rvalAnnotationVisualizationObjectView.xaml | 8 +- ...lAnnotationVisualizationObjectView.xaml.cs | 237 ++-- ...alAnnotationVisualizationObjectViewItem.cs | 128 +- .../XYValueVisualizationObjectCanvasView.cs | 33 +- .../Views/XYVisualizationPanelView.xaml | 135 ++- .../Views/XYVisualizationPanelView.xaml.cs | 85 ++ .../Views/XYZVisualizationPanelView.xaml | 43 +- .../Views/XYZVisualizationPanelView.xaml.cs | 76 ++ .../VisualizationContext.cs | 199 ++-- .../AnnotationPropertyDescriptor.cs | 6 +- .../Annotations/AnnotationValueEditor.cs | 38 +- .../TimeIntervalAnnotationDisplayData.cs | 156 ++- .../TimeIntervalAnnotationDragInfo.cs | 34 +- .../TimeIntervalAnnotationEditEventArgs.cs | 10 +- ...meIntervalAnnotationVisualizationObject.cs | 1030 +++++++++++------ .../BillboardTextVisualizationObject.cs | 39 + ...CoordinateSystemListVisualizationObject.cs | 3 +- .../DepthImageVisualizationObject.cs | 52 + .../IFormattableVisualizationObject.cs | 76 ++ .../ImageVisualizationObjectBase{TData}.cs | 93 +- .../LabeledPoint3DListVisualizationObject.cs | 3 +- .../LabeledPoint3DVisualizationObject.cs | 2 +- .../Line3DListVisualizationObject.cs | 3 +- ...lVisual3DCollectionVisualizationObject.cs} | 23 +- ...lVisual3DDictionaryVisualizationObject.cs} | 27 +- .../ModelVisual3DGraphVisualizationObject.cs | 130 +++ ...> ModelVisual3DListVisualizationObject.cs} | 17 +- ...cs => 
ModelVisual3DVisualizationObject.cs} | 45 +- .../BoolSeriesVisualizationObject.cs | 38 + .../NumericPlot/BoolVisualizationObject.cs | 38 + .../NullableBoolSeriesVisualizationObject.cs | 38 + .../NullableBoolVisualizationObject.cs | 38 + .../NullableDecimalVisualizationObject.cs | 2 +- .../NumericSeriesVisualizationObject.cs | 4 +- .../PlotSeriesVisualizationObject.cs | 12 + .../PlotVisualizationObject{TData}.cs | 19 +- .../PipelineDiagnosticsVisualizationObject.cs | 56 +- .../Point3DGraphVisualizationObject.cs | 204 ++++ ...nt3DListAsPointCloudVisualizationObject.cs | 15 + ...Point3DListAsSpheresVisualizationObject.cs | 3 +- .../Rect3DListVisualizationObject.cs | 3 +- .../SkeletonVisualizationObject.cs | 91 ++ ...iewManualFocalLengthVisualizationObject.cs | 2 +- .../SpatialCameraViewVisualizationObject.cs | 16 +- ...eshManualFocalLengthVisualizationObject.cs | 2 +- ...epthCameraViewAsMeshVisualizationObject.cs | 2 +- ...oudManualFocalLengthVisualizationObject.cs | 2 +- ...meraViewAsPointCloudVisualizationObject.cs | 49 +- ...treamIntervalVisualizationObject{TData}.cs | 163 +-- .../StreamValueVisualizationObject{TData}.cs | 20 +- .../StreamVisualizationObject{TData}.cs | 164 ++- .../Text3DVisualizationObject.cs | 110 ++ .../TextVisualizationObject.cs | 2 +- .../TimeIntervalHistoryVisualizationObject.cs | 20 +- ...ableVisual3DDictionary{TKey,TVisual3D}.cs} | 18 +- ...cs => UpdatableVisual3DList{TVisual3D}.cs} | 16 +- .../VisualizationContainer.cs | 168 ++- .../VisualizationObject.cs | 98 +- .../XYValueVisualizationObject.cs | 35 +- .../CanvasVisualizationPanel.cs | 48 + .../InstantVisualizationContainer.cs | 46 +- .../InstantVisualizationPlaceholderPanel.cs | 2 +- .../TimelineVisualizationPanel.cs | 322 +++++- .../VisualizationPanels/VisualizationPanel.cs | 156 ++- .../VisualizationPanelFactory.cs | 2 + .../VisualizationPanelType.cs | 9 +- .../VisualizationPanelTypeAttribute.cs | 1 - .../XYVisualizationPanel.cs | 573 +++++++-- .../XYZVisualizationPanel.cs | 177 ++- .../VisualizerMetadata.cs | 20 +- .../VisualizerMetadataComparer.cs | 2 +- .../Windows/CreateAnnotationStreamWindow.xaml | 2 +- .../CreateAnnotationStreamWindow.xaml.cs | 36 +- .../Windows/GetParameterWindow.xaml | 61 + .../Windows/GetParameterWindow.xaml.cs | 91 ++ .../Windows/RunBatchProcessingTaskWindow.xaml | 72 +- .../RunBatchProcessingTaskWindow.xaml.cs | 111 +- .../RunBatchProcessingTaskWindowViewModel.cs | 247 ++++ .../OberservableSortedCollectionUnitTest.cs | 2 +- .../ObservableKeyedCacheUnitTest.cs | 2 +- .../Properties/AssemblyInfo.cs | 6 +- ThirdPartyNotices.txt | 28 + 527 files changed, 25216 insertions(+), 5526 deletions(-) create mode 100644 Sources/Audio/Microsoft.Psi.Audio/WaveStreamSampleSource.cs create mode 100644 Sources/Calibration/Microsoft.Psi.Calibration/DepthPixelSemantics.cs delete mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/Annotation.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationAttributeSchema.cs delete mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationDefinition.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchema.cs delete mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchemaDefinition.cs delete mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchemaValueMetadata.cs delete mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchema{T}.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationValueSchema{T}.cs create mode 100644 
Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationValue{T}.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/EnumerableAnnotationValueSchema{T}.cs delete mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/FiniteAnnotationSchemaValue{T}.cs delete mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/FiniteAnnotationSchema{T}.cs delete mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationSchema.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationValue.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationValueSchema.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/IEnumerableAnnotationValueSchema.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/Operators.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/StringAnnotationValueSchema.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/Annotations/TimeIntervalAnnotationSet.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/BatchProcessingTask.cs rename Sources/{Runtime/Microsoft.Psi/Common => Data/Microsoft.Psi.Data}/BatchProcessingTaskAttribute.cs (91%) create mode 100644 Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskConfiguration.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskMetadata.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskOperators.cs create mode 100644 Sources/Data/Microsoft.Psi.Data/IBatchProcessingTask.cs rename Sources/{Visualization/Microsoft.Psi.Visualization.Windows/Base => Data/Microsoft.Psi.Data}/ObservableObject.cs (98%) create mode 100644 Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImageToGZipStreamEncoder.cs delete mode 100644 Sources/Imaging/Microsoft.Psi.Imaging/ToPixelFormat.cs create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.def create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.vcxproj create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.vcxproj.filters create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/PropertySheet.props create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/Readme.md create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeAccelFrame.cpp create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeAccelFrame.h create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeApi.h create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeCameraSensor.cpp create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeCameraSensor.h create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeGyroFrame.cpp create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeGyroFrame.h create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeImuSensor.cpp create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeImuSensor.h create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeMagFrame.cpp create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeMagFrame.h create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDepthFrame.cpp create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDepthFrame.h create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.cpp create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.h create mode 100644 
Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.idl create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorVlcFrame.cpp create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorVlcFrame.h create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/packages.config create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/pch.cpp create mode 100644 Sources/MixedReality/HoloLens2ResearchMode/pch.h create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Accelerometer.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/DepthCamera.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/DepthCameraConfiguration.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Gyroscope.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ImageToGzipStreamEncoder.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ImageToJpegStreamEncoder.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Magnetometer.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Microsoft.Psi.MixedReality.UniversalWindows.csproj create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedReality.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedRealityCapturePerspective.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedRealityCaptureVideoEffect.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Operators.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/PhotoVideoCamera.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/PhotoVideoCameraConfiguration.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Properties/AssemblyInfo.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Properties/Microsoft.Psi.MixedReality.UniversalWindows.rd.xml create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ResearchModeCamera.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ResearchModeImu.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SceneUnderstanding.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SceneUnderstandingConfiguration.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SpatialAnchorHelper.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SpatialAnchorsSource.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/UnsafeNative.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/VisibleLightCamera.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/VisibleLightCameraConfiguration.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/stylecop.json create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/HandVisualizationObject.cs create mode 100644 
Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/HandVisualizationObjectAdapter.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/Microsoft.Psi.MixedReality.Visualization.Windows.csproj create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/stylecop.json create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/CalibrationPointsMap.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/EyesSensor.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/Hand.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/HandJointIndex.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/Handle.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/HandsSensor.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/HeadSensor.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/Microphone.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/MicrophoneConfiguration.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/Microsoft.Psi.MixedReality.csproj create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/Operators.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/Mesh3DListStereoKitRenderer.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/MeshStereoKitRenderer.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/ModelBasedStereoKitRenderer.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/Rectangle3DListStereoKitRenderer.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/SceneObjectCollection.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/SpatialSound.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/StereoKitComponent.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/StereoKitTransforms.cs create mode 100644 Sources/MixedReality/Microsoft.Psi.MixedReality/stylecop.json create mode 100644 Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Operators.cs create mode 100644 Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Readme.md create mode 100644 Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Rendezvous.cs create mode 100644 Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousClient.cs create mode 100644 Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousClient.py create mode 100644 Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousRelay.cs create mode 100644 Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousServer.cs create mode 100644 Sources/Runtime/Microsoft.Psi.Interop/Serialization/Format{T}.cs create mode 100644 Sources/Runtime/Microsoft.Psi.Interop/Serialization/PersistentFormat.cs create mode 100644 Sources/Runtime/Microsoft.Psi.Interop/Transport/TcpSource.cs create mode 100644 Sources/Runtime/Microsoft.Psi.Interop/Transport/TcpWriter.cs create mode 100644 Sources/Runtime/Microsoft.Psi/Common/Interpolators/Interpolator{T}.cs create mode 100644 Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreMonitor.cs create mode 100644 Sources/Runtime/Microsoft.Psi/Remoting/RemoteClockExporter.cs create mode 100644 Sources/Runtime/Microsoft.Psi/Remoting/RemoteClockImporter.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Adapters.cs create mode 100644 
Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Box3DListVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Box3DVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/DepthImageRectangle3DListVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/DepthImageRectangle3DVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/ImageRectangle3DVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/LinearVelocity3DListVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/LinearVelocity3DVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Mesh3DListVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Mesh3DVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows.csproj create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/NumericalVoxelGridVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/PointCloud3DVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Rectangle3DListVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Rectangle3DVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/VoxelGridVisualizationObject.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/stylecop.json create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/AngularVelocity3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Bounds3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Box3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/CoordinateSystemVelocity3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/DepthImageRectangle3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/EncodedDepthImageRectangle3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/EncodedImageRectangle3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/ImageRectangle3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/Operators.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/LinearVelocity3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Mesh3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Microsoft.Psi.Spatial.Euclidean.csproj create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Operators.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/PointCloud3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Rectangle3D.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Voxel.cs create mode 100644 Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/VoxelGrid.cs create mode 100644 
Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/stylecop.json create mode 100644 Sources/Tools/PsiStudio/Microsoft.Psi.PsiStudio/app.config create mode 100644 Sources/Visualization/Microsoft.Psi.LiveCharts.Visualization.Windows/XYPlotVisualizationObject.cs delete mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Adapters/BoolToDoubleAdapter.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Adapters/InterfaceAdapter.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Adapters/MathNetPoint2DToScatterPlotAdapter.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Adapters/NullablePoint2DListToScatterPlotAdapter.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Adapters/NullableRectangleAdapter.cs delete mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/BatchProcessingTasks/BatchProcessingTaskMetadata.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Common/TimelinePanelMousePosition.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Common/TimelineValueThreshold.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/DataTypes/Graph.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Helpers/TimeSpanFormatHelper.cs delete mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Icons/close-panel.png create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Icons/layout-delete.png create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Icons/panel-showhide-smallbutton-faded.png create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Icons/panel-showhide-smallbutton.png create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Icons/panel-xy.png rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/Icons/{panel-3d.png => panel-xyz.png} (100%) create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Icons/remove-smallbutton-faded.png rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/{Collections => Observables}/ObservableDataCollection{T}.cs (98%) rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/{Collections => Observables}/ObservableDataItem{T}.cs (90%) rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/{Collections => Observables}/ObservableKeyedCache{TKey,TItem}.cs (99%) rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/{Collections => Observables}/ObservableSortedCollection{T}.cs (96%) rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/{Base => Observables}/ObservableTreeNodeObject.cs (97%) create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Summarizers/Numeric/BoolRangeSummarizer.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Summarizers/Numeric/BoolSeriesRangeSummarizer.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Summarizers/Numeric/NullableBoolRangeSummarizer.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Summarizers/Numeric/NullableBoolSeriesRangeSummarizer.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/CanvasVisualizationPanelView.xaml create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/CanvasVisualizationPanelView.xaml.cs create mode 
100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/DepthImageVisualizationObjectViewBase.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/IFormattableVisualizationObjectView.xaml create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/IFormattableVisualizationObjectView.xaml.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/ImageVisualizationObjectViewBase.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/NumericPlot/BoolSeriesVisualizationObjectView.xaml create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/NumericPlot/BoolSeriesVisualizationObjectView.xaml.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/NumericPlot/BoolVisualizationObjectView.xaml create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/NumericPlot/BoolVisualizationObjectView.xaml.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/NumericPlot/NullableBoolSeriesVisualizationObjectView.xaml create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/NumericPlot/NullableBoolSeriesVisualizationObjectView.xaml.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/NumericPlot/NullableBoolSeriesVisualizationObjectViewBase.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/NumericPlot/NullableBoolVisualizationObjectView.xaml create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/NumericPlot/NullableBoolVisualizationObjectView.xaml.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/NumericPlot/NullableBoolVisualizationObjectViewBase.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/Visuals2D/TimeIntervalAnnotationTrackVisualizationObjectViewItem.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/IFormattableVisualizationObject.cs rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/{ModelVisual3DVisualizationObjectCollectionBase{TVisualizationObject,TData}.cs => ModelVisual3DCollectionVisualizationObject.cs} (91%) rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/{UpdatableModelVisual3DVisualizationObjectDictionary{TVisObj,TKey,TData}.cs => ModelVisual3DDictionaryVisualizationObject.cs} (83%) create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/ModelVisual3DGraphVisualizationObject.cs rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/{ModelVisual3DVisualizationObjectEnumerable{TVisObj,TData,TColl}.cs => ModelVisual3DListVisualizationObject.cs} (78%) rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/{ModelVisual3DVisualizationObject{TData}.cs => ModelVisual3DVisualizationObject.cs} (88%) create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/NumericPlot/BoolSeriesVisualizationObject.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/NumericPlot/BoolVisualizationObject.cs create mode 100644 
Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/NumericPlot/NullableBoolSeriesVisualizationObject.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/NumericPlot/NullableBoolVisualizationObject.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/Point3DGraphVisualizationObject.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/SkeletonVisualizationObject.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/Text3DVisualizationObject.cs rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/{UpdatableVisual3DDictionary{TKey,TVisual}.cs => UpdatableVisual3DDictionary{TKey,TVisual3D}.cs} (87%) rename Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationObjects/{UpdatableVisual3DList{TVisual}.cs => UpdatableVisual3DList{TVisual3D}.cs} (91%) create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/VisualizationPanels/CanvasVisualizationPanel.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/GetParameterWindow.xaml create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/GetParameterWindow.xaml.cs create mode 100644 Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindowViewModel.cs diff --git a/Directory.Build.props b/Directory.Build.props index 338c90149..7b8affd68 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -6,7 +6,7 @@ Microsoft Corporation microsoft,psi Microsoft - 0.15.49.1 + 0.16.92.1 $(AssemblyVersion) $(AssemblyVersion)-beta false diff --git a/Psi.sln b/Psi.sln index 3fbc745d0..ea34bff28 100644 --- a/Psi.sln +++ b/Psi.sln @@ -198,6 +198,22 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Filters", "Filters", "{E0E7 EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Psi.Filters", "Sources\Filters\Microsoft.Psi.Filters\Microsoft.Psi.Filters.csproj", "{E0621435-AF35-4CFA-BE9E-3781AF6E161F}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Spatial", "Spatial", "{F36DEF23-4FFF-4237-9104-03CF19036C70}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Psi.Spatial.Euclidean", "Sources\Spatial\Microsoft.Psi.Spatial.Euclidean\Microsoft.Psi.Spatial.Euclidean.csproj", "{C3114338-AD22-4EBC-85C3-EE06045CDD78}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Psi.Spatial.Euclidean.Visualization.Windows", "Sources\Spatial\Microsoft.Psi.Spatial.Euclidean.Visualization.Windows\Microsoft.Psi.Spatial.Euclidean.Visualization.Windows.csproj", "{A1429F96-C7F8-49D8-ADB8-73A1A4DAA70F}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MixedReality", "MixedReality", "{32023088-0392-4B48-B2CF-3754B55C6DE9}" +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "HoloLens2ResearchMode", "Sources\MixedReality\HoloLens2ResearchMode\HoloLens2ResearchMode.vcxproj", "{F50194C0-9561-40C7-B9CB-B977E3B3D76D}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Psi.MixedReality", "Sources\MixedReality\Microsoft.Psi.MixedReality\Microsoft.Psi.MixedReality.csproj", "{3434D5B2-B06F-4356-9E9B-90171CEF482B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.Psi.MixedReality.UniversalWindows", 
"Sources\MixedReality\Microsoft.Psi.MixedReality.UniversalWindows\Microsoft.Psi.MixedReality.UniversalWindows.csproj", "{ECD9E150-8104-4DA3-B807-A6A4392A67C6}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Psi.MixedReality.Visualization.Windows", "Sources\MixedReality\Microsoft.Psi.MixedReality.Visualization.Windows\Microsoft.Psi.MixedReality.Visualization.Windows.csproj", "{BE95524A-F9C2-4D0D-8F7E-1C7019B5A114}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -436,6 +452,30 @@ Global {E0621435-AF35-4CFA-BE9E-3781AF6E161F}.Debug|Any CPU.Build.0 = Debug|Any CPU {E0621435-AF35-4CFA-BE9E-3781AF6E161F}.Release|Any CPU.ActiveCfg = Release|Any CPU {E0621435-AF35-4CFA-BE9E-3781AF6E161F}.Release|Any CPU.Build.0 = Release|Any CPU + {C3114338-AD22-4EBC-85C3-EE06045CDD78}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C3114338-AD22-4EBC-85C3-EE06045CDD78}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C3114338-AD22-4EBC-85C3-EE06045CDD78}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C3114338-AD22-4EBC-85C3-EE06045CDD78}.Release|Any CPU.Build.0 = Release|Any CPU + {A1429F96-C7F8-49D8-ADB8-73A1A4DAA70F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A1429F96-C7F8-49D8-ADB8-73A1A4DAA70F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A1429F96-C7F8-49D8-ADB8-73A1A4DAA70F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A1429F96-C7F8-49D8-ADB8-73A1A4DAA70F}.Release|Any CPU.Build.0 = Release|Any CPU + {F50194C0-9561-40C7-B9CB-B977E3B3D76D}.Debug|Any CPU.ActiveCfg = Debug|ARM + {F50194C0-9561-40C7-B9CB-B977E3B3D76D}.Debug|Any CPU.Build.0 = Debug|ARM + {F50194C0-9561-40C7-B9CB-B977E3B3D76D}.Release|Any CPU.ActiveCfg = Release|ARM + {F50194C0-9561-40C7-B9CB-B977E3B3D76D}.Release|Any CPU.Build.0 = Release|ARM + {3434D5B2-B06F-4356-9E9B-90171CEF482B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3434D5B2-B06F-4356-9E9B-90171CEF482B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3434D5B2-B06F-4356-9E9B-90171CEF482B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3434D5B2-B06F-4356-9E9B-90171CEF482B}.Release|Any CPU.Build.0 = Release|Any CPU + {ECD9E150-8104-4DA3-B807-A6A4392A67C6}.Debug|Any CPU.ActiveCfg = Debug|ARM + {ECD9E150-8104-4DA3-B807-A6A4392A67C6}.Debug|Any CPU.Build.0 = Debug|ARM + {ECD9E150-8104-4DA3-B807-A6A4392A67C6}.Release|Any CPU.ActiveCfg = Release|ARM + {ECD9E150-8104-4DA3-B807-A6A4392A67C6}.Release|Any CPU.Build.0 = Release|ARM + {BE95524A-F9C2-4D0D-8F7E-1C7019B5A114}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BE95524A-F9C2-4D0D-8F7E-1C7019B5A114}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BE95524A-F9C2-4D0D-8F7E-1C7019B5A114}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BE95524A-F9C2-4D0D-8F7E-1C7019B5A114}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -522,6 +562,14 @@ Global {A0677BEA-ADB1-4950-89E6-89483D621A52} = {EE4035A8-CEFE-4E3A-9CD9-4AE7E88DA2C4} {E0E7957E-731F-4FDD-83FE-634FFE24862F} = {A0856299-D28A-4513-B964-3FA5290FF160} {E0621435-AF35-4CFA-BE9E-3781AF6E161F} = {E0E7957E-731F-4FDD-83FE-634FFE24862F} + {F36DEF23-4FFF-4237-9104-03CF19036C70} = {A0856299-D28A-4513-B964-3FA5290FF160} + {C3114338-AD22-4EBC-85C3-EE06045CDD78} = {F36DEF23-4FFF-4237-9104-03CF19036C70} + {A1429F96-C7F8-49D8-ADB8-73A1A4DAA70F} = {F36DEF23-4FFF-4237-9104-03CF19036C70} + {32023088-0392-4B48-B2CF-3754B55C6DE9} = {A0856299-D28A-4513-B964-3FA5290FF160} + {F50194C0-9561-40C7-B9CB-B977E3B3D76D} = {32023088-0392-4B48-B2CF-3754B55C6DE9} + 
	{3434D5B2-B06F-4356-9E9B-90171CEF482B} = {32023088-0392-4B48-B2CF-3754B55C6DE9}
+		{ECD9E150-8104-4DA3-B807-A6A4392A67C6} = {32023088-0392-4B48-B2CF-3754B55C6DE9}
+		{BE95524A-F9C2-4D0D-8F7E-1C7019B5A114} = {32023088-0392-4B48-B2CF-3754B55C6DE9}
 	EndGlobalSection
 	GlobalSection(ExtensibilityGlobals) = postSolution
 		SolutionGuid = {EAF15EE9-DCC5-411B-A9E5-7C2F3D132331}
diff --git a/Sources/Audio/Microsoft.Psi.Audio.Linux/AudioCapture.cs b/Sources/Audio/Microsoft.Psi.Audio.Linux/AudioCapture.cs
index 24b19bfdd..f9873742e 100644
--- a/Sources/Audio/Microsoft.Psi.Audio.Linux/AudioCapture.cs
+++ b/Sources/Audio/Microsoft.Psi.Audio.Linux/AudioCapture.cs
@@ -158,7 +158,7 @@ public void Start(Action notifyCompletionTime)
                 Array.Copy(buf, this.buffer.Data, length);

                 // use the end of the last sample in the packet as the originating time
-                DateTime originatingTime = this.pipeline.GetCurrentTime().AddSeconds(length / format.AvgBytesPerSec);
+                DateTime originatingTime = this.pipeline.GetCurrentTime().AddSeconds((double)length / format.AvgBytesPerSec);

                 // post the data to the output stream
                 this.audioBuffers.Post(this.buffer, originatingTime);
diff --git a/Sources/Audio/Microsoft.Psi.Audio.Windows/MFResampler.cs b/Sources/Audio/Microsoft.Psi.Audio.Windows/MFResampler.cs
index 75fd19579..513fdd526 100644
--- a/Sources/Audio/Microsoft.Psi.Audio.Windows/MFResampler.cs
+++ b/Sources/Audio/Microsoft.Psi.Audio.Windows/MFResampler.cs
@@ -91,16 +91,32 @@ public void Initialize(int targetLatencyInMs, WaveFormat inFormat, WaveFormat ou
             this.inputBufferSize = (int)(this.bufferLengthInMs * inFormat.AvgBytesPerSec / 1000);
             this.outputBufferSize = (int)(this.bufferLengthInMs * outFormat.AvgBytesPerSec / 1000);

+            Exception taskException = null;
+
             // Activate native Media Foundation COM objects on a thread-pool thread to ensure that they are in an MTA
             Task.Run(() =>
             {
-                DeviceUtil.CreateResamplerBuffer(this.inputBufferSize, out this.inputSample, out this.inputBuffer);
-                DeviceUtil.CreateResamplerBuffer(this.outputBufferSize, out this.outputSample, out this.outputBuffer);
+                try
+                {
+                    DeviceUtil.CreateResamplerBuffer(this.inputBufferSize, out this.inputSample, out this.inputBuffer);
+                    DeviceUtil.CreateResamplerBuffer(this.outputBufferSize, out this.outputSample, out this.outputBuffer);

-                // Create resampler object
-                this.resampler = DeviceUtil.CreateResampler(inFormat, outFormat);
+                    // Create resampler object
+                    this.resampler = DeviceUtil.CreateResampler(inFormat, outFormat);
+                }
+                catch (Exception e)
+                {
+                    taskException = e;
+                }
             }).Wait();

+            // do error checking on the main thread
+            if (taskException != null)
+            {
+                // rethrow exception
+                throw taskException;
+            }
+
             // Set the callback function
             this.dataAvailableCallback = callback;
         }
diff --git a/Sources/Audio/Microsoft.Psi.Audio.Windows/Operators.cs b/Sources/Audio/Microsoft.Psi.Audio.Windows/Operators.cs
index 9096684ea..85b778e16 100644
--- a/Sources/Audio/Microsoft.Psi.Audio.Windows/Operators.cs
+++ b/Sources/Audio/Microsoft.Psi.Audio.Windows/Operators.cs
@@ -3,8 +3,6 @@

 namespace Microsoft.Psi.Audio
 {
-    using Microsoft.Psi.Audio;
-
     ///
     /// Stream operators and extension methods for Microsoft.Psi.Audio.Windows.
     ///
diff --git a/Sources/Audio/Microsoft.Psi.Audio.Windows/WasapiCapture.cs b/Sources/Audio/Microsoft.Psi.Audio.Windows/WasapiCapture.cs
index ee2550fb5..cec581bf2 100644
--- a/Sources/Audio/Microsoft.Psi.Audio.Windows/WasapiCapture.cs
+++ b/Sources/Audio/Microsoft.Psi.Audio.Windows/WasapiCapture.cs
@@ -143,33 +143,47 @@ public void Dispose()
         ///
         public void Initialize(string deviceDescription)
         {
+            Exception taskException = null;
+
             // Activate native audio COM objects on a thread-pool thread to ensure that they are in an MTA
             Task.Run(() =>
             {
-                if (string.IsNullOrEmpty(deviceDescription))
+                try
                 {
-                    // use the default console device
-                    this.audioDevice = DeviceUtil.GetDefaultDevice(EDataFlow.Capture, ERole.Console);
+                    if (string.IsNullOrEmpty(deviceDescription))
+                    {
+                        // use the default console device
+                        this.audioDevice = DeviceUtil.GetDefaultDevice(EDataFlow.Capture, ERole.Console);
+                    }
+                    else
+                    {
+                        this.audioDevice = DeviceUtil.GetDeviceByName(EDataFlow.Capture, deviceDescription);
+                    }
+
+                    if (this.audioDevice != null)
+                    {
+                        // Try to get the volume control
+                        object obj = this.audioDevice.Activate(new Guid(Guids.IAudioEndpointVolumeIIDString), ClsCtx.ALL, IntPtr.Zero);
+                        this.volume = (IAudioEndpointVolume)obj;
+
+                        // Now create an IAudioEndpointVolumeCallback object that wraps the callback and register it with the endpoint.
+                        this.volumeCallback = new AudioEndpointVolumeCallback(this.AudioVolumeCallback);
+                        this.volume.RegisterControlChangeNotify(this.volumeCallback);
+                    }
                 }
-                else
+                catch (Exception e)
                 {
-                    this.audioDevice = DeviceUtil.GetDeviceByName(EDataFlow.Capture, deviceDescription);
-                }
-
-                if (this.audioDevice != null)
-                {
-                    // Try to get the volume control
-                    object obj = this.audioDevice.Activate(new Guid(Guids.IAudioEndpointVolumeIIDString), ClsCtx.ALL, IntPtr.Zero);
-                    this.volume = (IAudioEndpointVolume)obj;
-
-                    // Now create an IAudioEndpointVolumeCallback object that wraps the callback and register it with the endpoint.
-                    this.volumeCallback = new AudioEndpointVolumeCallback(this.AudioVolumeCallback);
-                    this.volume.RegisterControlChangeNotify(this.volumeCallback);
+                    taskException = e;
                 }
             }).Wait();

             // do error checking on the main thread
-            if (this.audioDevice == null)
+            if (taskException != null)
+            {
+                // rethrow exception
+                throw taskException;
+            }
+            else if (this.audioDevice == null)
             {
                 throw new IOException(string.IsNullOrEmpty(deviceDescription) ?
                     "No default audio capture device found." :
diff --git a/Sources/Audio/Microsoft.Psi.Audio.Windows/WasapiRender.cs b/Sources/Audio/Microsoft.Psi.Audio.Windows/WasapiRender.cs
index 7f4a019e8..9e3d445cc 100644
--- a/Sources/Audio/Microsoft.Psi.Audio.Windows/WasapiRender.cs
+++ b/Sources/Audio/Microsoft.Psi.Audio.Windows/WasapiRender.cs
@@ -4,6 +4,7 @@
 namespace Microsoft.Psi.Audio
 {
     using System;
+    using System.IO;
     using System.Runtime.InteropServices;
     using System.Threading.Tasks;
     using Microsoft.Psi.Audio.ComInterop;
@@ -138,30 +139,52 @@ public void Dispose()
         ///
         public void Initialize(string deviceDescription)
         {
+            Exception taskException = null;
+
             // Activate native audio COM objects on a thread-pool thread to ensure that they are in an MTA
             Task.Run(() =>
             {
-                if (string.IsNullOrEmpty(deviceDescription))
+                try
                 {
-                    // use the default console device
-                    this.audioDevice = DeviceUtil.GetDefaultDevice(EDataFlow.Render, ERole.Console);
+                    if (string.IsNullOrEmpty(deviceDescription))
+                    {
+                        // use the default console device
+                        this.audioDevice = DeviceUtil.GetDefaultDevice(EDataFlow.Render, ERole.Console);
+                    }
+                    else
+                    {
+                        this.audioDevice = DeviceUtil.GetDeviceByName(EDataFlow.Render, deviceDescription);
+                    }
+
+                    if (this.audioDevice != null)
+                    {
+                        // Try to get the volume control
+                        object obj = this.audioDevice.Activate(new Guid(Guids.IAudioEndpointVolumeIIDString), ClsCtx.ALL, IntPtr.Zero);
+                        this.volume = (IAudioEndpointVolume)obj;
+
+                        // Now create an IAudioEndpointVolumeCallback object that wraps the callback and register it with the endpoint.
+                        this.volumeCallback = new AudioEndpointVolumeCallback(this.AudioVolumeCallback);
+                        this.volume.RegisterControlChangeNotify(this.volumeCallback);
+                    }
                 }
-                else
+                catch (Exception e)
                 {
-                    this.audioDevice = DeviceUtil.GetDeviceByName(EDataFlow.Render, deviceDescription);
-                }
-
-                if (this.audioDevice != null)
-                {
-                    // Try to get the volume control
-                    object obj = this.audioDevice.Activate(new Guid(Guids.IAudioEndpointVolumeIIDString), ClsCtx.ALL, IntPtr.Zero);
-                    this.volume = (IAudioEndpointVolume)obj;
-
-                    // Now create an IAudioEndpointVolumeCallback object that wraps the callback and register it with the endpoint.
-                    this.volumeCallback = new AudioEndpointVolumeCallback(this.AudioVolumeCallback);
-                    this.volume.RegisterControlChangeNotify(this.volumeCallback);
+                    taskException = e;
                 }
             }).Wait();
+
+            // do error checking on the main thread
+            if (taskException != null)
+            {
+                // rethrow exception
+                throw taskException;
+            }
+            else if (this.audioDevice == null)
+            {
+                throw new IOException(string.IsNullOrEmpty(deviceDescription) ?
+                    "No default audio playback device found." :
+                    $"Audio playback device {deviceDescription} not found.");
+            }
         }

         ///
diff --git a/Sources/Audio/Microsoft.Psi.Audio/WaveFileHelper.cs b/Sources/Audio/Microsoft.Psi.Audio/WaveFileHelper.cs
index 7665a5d35..6c2100c25 100644
--- a/Sources/Audio/Microsoft.Psi.Audio/WaveFileHelper.cs
+++ b/Sources/Audio/Microsoft.Psi.Audio/WaveFileHelper.cs
@@ -63,8 +63,18 @@ public static WaveFormat ReadWaveFileHeader(string filename)
         /// The number of byte of wave data that follow.
         public static long ReadWaveDataLength(BinaryReader br)
         {
-            if (Encoding.UTF8.GetString(BitConverter.GetBytes(br.ReadInt32())) != "data")
+            var name = Encoding.UTF8.GetString(BitConverter.GetBytes(br.ReadInt32()));
+            if (name != "data")
             {
+                if (name == "fact" || name == "LIST")
+                {
+                    // Some formats (e.g. IEEE float) contain fact and LIST chunks (which we skip).
+ // see fhe "fact Chunk" section of the spec: http://www-mmsp.ece.mcgill.ca/Documents/AudioFormats/WAVE/WAVE.html + // "IEEE float data (introduced after the Rev. 3 documention) need a fact" + br.ReadBytes((int)br.ReadUInt32()); // skip + return ReadWaveDataLength(br); + } + throw new FormatException("Data header missing"); } diff --git a/Sources/Audio/Microsoft.Psi.Audio/WaveStreamSampleSource.cs b/Sources/Audio/Microsoft.Psi.Audio/WaveStreamSampleSource.cs new file mode 100644 index 000000000..b9fe35266 --- /dev/null +++ b/Sources/Audio/Microsoft.Psi.Audio/WaveStreamSampleSource.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Audio +{ + using System; + using System.IO; + using Microsoft.Psi.Components; + + /// + /// Component that produces on-demand an audio sample specified by a . + /// + /// + /// This is meant for relatively short sound effects cached in memory. + /// We consume the stream given at construction time; breaking it into + /// audio buffers which are "played" upon receiving a true input signal. + /// + public class WaveStreamSampleSource : IConsumerProducer + { + private readonly Pipeline pipeline; + private readonly AudioBuffer[] audioData; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Audio stream in WAVE format (48KHz, 1-channel, IEEE Float). + public WaveStreamSampleSource(Pipeline pipeline, Stream stream) + { + this.pipeline = pipeline; + this.In = pipeline.CreateReceiver(this, this.Play, nameof(this.In)); + this.Out = pipeline.CreateEmitter(this, nameof(this.Out)); + + var reader = new BinaryReader(stream); + var inputFormat = WaveFileHelper.ReadWaveFileHeader(reader); + + // we don't do resampling or conversion (must be 1-channel, 48kHz, float32). + // convert offline if needed: ffmpeg -i foo.wav -f wav -acodec pcm_f32le -ar 48000 -ac 1 bar.wav + if (inputFormat.Channels != 1 || + inputFormat.SamplesPerSec != 48000 || + (inputFormat.FormatTag != WaveFormatTag.WAVE_FORMAT_IEEE_FLOAT && + inputFormat.FormatTag != WaveFormatTag.WAVE_FORMAT_EXTENSIBLE) || + inputFormat.BitsPerSample != 32) + { + throw new ArgumentException("Expected 1-channel, 48kHz, float32 audio format."); + } + + // break into 1 second audio buffers + var outputFormat = WaveFormat.CreateIeeeFloat(48000, 1); + var dataLength = WaveFileHelper.ReadWaveDataLength(reader); + + // stepping over this line computing frames (e.g. F10) in the debugger will throw - still trying to understand why + var frames = (int)Math.Ceiling((double)dataLength / (double)outputFormat.AvgBytesPerSec); + this.audioData = new AudioBuffer[frames]; + for (var i = 0; dataLength > 0; i++) + { + var count = (int)Math.Min(dataLength, outputFormat.AvgBytesPerSec); + var bytes = reader.ReadBytes(count); + this.audioData[i] = new AudioBuffer(bytes, outputFormat); + dataLength -= count; + } + } + + /// + /// Gets the receiver of a signal indicating whether to play a sound. + /// + public Receiver In { get; private set; } + + /// + /// Gets the stream of sound output. 
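As a worked example of the chunking loop in the constructor above: for 48 kHz, 1-channel, 32-bit float audio, AvgBytesPerSec is 48000 × 1 × 4 = 192,000 bytes, so a 2.5-second sample (480,000 bytes of wave data) yields ceil(480,000 / 192,000) = 3 AudioBuffers of 192,000, 192,000, and 96,000 bytes, which are then posted at one-second offsets from the trigger time when the sample is played.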
+ /// + public Emitter Out { get; private set; } + + private void Play(bool play) + { + if (play) + { + var now = this.pipeline.GetCurrentTime(); + if (now < this.Out.LastEnvelope.OriginatingTime) + { + // overlapping with last time played (play after) + now = this.Out.LastEnvelope.OriginatingTime.AddTicks(1); + } + + for (var i = 0; i < this.audioData.Length; i++) + { + this.Out.Post(this.audioData[i], now + TimeSpan.FromSeconds(i)); + } + } + } + } +} diff --git a/Sources/Audio/Test.Psi.Audio/Test.Psi.Audio.csproj b/Sources/Audio/Test.Psi.Audio/Test.Psi.Audio.csproj index f146dbecf..0686c3ed6 100644 --- a/Sources/Audio/Test.Psi.Audio/Test.Psi.Audio.csproj +++ b/Sources/Audio/Test.Psi.Audio/Test.Psi.Audio.csproj @@ -31,7 +31,7 @@ all runtime; build; native; contentfiles; analyzers - + diff --git a/Sources/Calibration/Microsoft.Psi.Calibration/CalibrationExtensions.cs b/Sources/Calibration/Microsoft.Psi.Calibration/CalibrationExtensions.cs index a6726e2ca..e0053bc88 100644 --- a/Sources/Calibration/Microsoft.Psi.Calibration/CalibrationExtensions.cs +++ b/Sources/Calibration/Microsoft.Psi.Calibration/CalibrationExtensions.cs @@ -11,10 +11,136 @@ namespace Microsoft.Psi.Calibration using Microsoft.Psi.Imaging; /// - /// Provides various helper and extension methods for dealing with calibration objects, camera intrinsics, rotations, etc. + /// Provides various helper and extension methods for calibrating cameras, dealing with calibration objects, camera intrinsics, rotations, etc. /// public static class CalibrationExtensions { + /// + /// Estimate a camera's camera matrix and distortion coefficients, given a set of image points and + /// corresponding 3d camera points. The underlying calibration procedure utilizes Levenberg Marquardt + /// optimization to produce these estimates. + /// + /// 3d positions of the points in camera coordinates. + /// These points are *not* yet in the typically assumed \psi basis (X=Forward, Y=Left, Z=Up). + /// Instead, we assume that X and Y correspond to directions in the image plane, and Z corresponds to depth in the plane. + /// 2d positions of the points in the image. + /// Initial estimate of the camera matrix. + /// Initial estimate of distortion coefficients. + /// Estimated output camera matrix. + /// Estimated output distortion coefficients. + /// If false, print debugging information to the console. + /// The RMS (root mean squared) error of this computation. 
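A minimal usage sketch of the WaveStreamSampleSource component introduced above (hypothetical file name and wiring; assumes the stream is a 48 kHz, 1-channel, IEEE-float WAVE resource and that the Windows AudioPlayer, configured for a matching input format, is available as a consumer):

    using System.IO;
    using Microsoft.Psi;
    using Microsoft.Psi.Audio;

    using (var pipeline = Pipeline.Create())
    {
        // cache the sound effect in memory as 1-second AudioBuffers
        var ding = new WaveStreamSampleSource(pipeline, File.OpenRead("ding.wav"));

        // any bool stream can trigger playback; here a single 'true' is posted at pipeline start
        var trigger = Generators.Return(pipeline, true);
        trigger.PipeTo(ding.In);

        // render the buffers; configure the player's input format to match the sample as needed
        ding.Out.PipeTo(new AudioPlayer(pipeline, new AudioPlayerConfiguration()));

        pipeline.Run();
    }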
+ public static double CalibrateCameraIntrinsics( + List cameraPoints, + List imagePoints, + Matrix initialCameraMatrix, + Vector initialDistortionCoefficients, + out Matrix cameraMatrix, + out Vector distortionCoefficients, + bool silent = true) + { + // pack parameters into vector + // parameters: fx, fy, cx, cy, k1, k2 = 6 parameters + var initialParameters = Vector.Build.Dense(6); + int pi = 0; + initialParameters[pi++] = initialCameraMatrix[0, 0]; // fx + initialParameters[pi++] = initialCameraMatrix[1, 1]; // fy + initialParameters[pi++] = initialCameraMatrix[0, 2]; // cx + initialParameters[pi++] = initialCameraMatrix[1, 2]; // cy + initialParameters[pi++] = initialDistortionCoefficients[0]; // k1 + initialParameters[pi++] = initialDistortionCoefficients[1]; // k2 + + var error = CalibrateCamera(cameraPoints, imagePoints, initialParameters, false, out var computedParameters, silent); + + // unpack parameters into the outputs + cameraMatrix = Matrix.Build.Dense(3, 3); + distortionCoefficients = Vector.Build.Dense(2); + + pi = 0; + cameraMatrix[0, 0] = computedParameters[pi++]; // fx + cameraMatrix[1, 1] = computedParameters[pi++]; // fy + cameraMatrix[0, 2] = computedParameters[pi++]; // cx + cameraMatrix[1, 2] = computedParameters[pi++]; // cy + distortionCoefficients[0] = computedParameters[pi++]; // k1 + distortionCoefficients[1] = computedParameters[pi++]; // k2 + + return error; + } + + /// + /// Estimate a camera's intrinsics (camera matrix + distortion coefficients) and extrinsics (rotation + translation) + /// given a set of image points and corresponding 3d world points. The underlying calibration procedure utilizes + /// Levenberg Marquardt optimization to produce these estimates. + /// + /// 3d positions of the points in world coordinates. + /// These points are *not* yet in the typically assumed \psi basis (X=Forward, Y=Left, Z=Up). + /// Instead, we assume that X and Y correspond to directions in the image plane, and Z corresponds to depth in the plane. + /// 2d positions of the points in the image. + /// Initial estimate of the camera matrix. + /// Initial estimate of distortion coefficients. + /// Estimated output camera matrix. + /// Estimated output distortion coefficients. + /// Estimated camera rotation. + /// Estimated camera translation. + /// If false, print debugging information to the console. + /// The RMS (root mean squared) error of this computation. 
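Before the intrinsics-plus-extrinsics variant declared next, here is a call sketch for the CalibrateCameraIntrinsics helper above (hypothetical initial guesses; the cameraPoints and imagePoints lists are assumed to hold corresponding 3D/2D observations, with X and Y in the image plane and Z as depth):

    using System.Collections.Generic;
    using MathNet.Numerics.LinearAlgebra;
    using MathNet.Spatial.Euclidean;
    using Microsoft.Psi.Calibration;

    // corresponding observations gathered elsewhere (e.g. from a calibration target)
    var cameraPoints = new List<Point3D>();
    var imagePoints = new List<Point2D>();

    // initial guesses: focal lengths on the diagonal, principal point near the image center, no distortion
    var initialK = Matrix<double>.Build.DenseIdentity(3);
    initialK[0, 0] = 600;   // fx
    initialK[1, 1] = 600;   // fy
    initialK[0, 2] = 320;   // cx
    initialK[1, 2] = 240;   // cy
    var initialD = Vector<double>.Build.Dense(2);   // k1 = k2 = 0

    var rmsError = CalibrationExtensions.CalibrateCameraIntrinsics(
        cameraPoints, imagePoints, initialK, initialD,
        out var cameraMatrix, out var distortionCoefficients, silent: false);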
+ public static double CalibrateCameraIntrinsicsAndExtrinsics( + List worldPoints, + List imagePoints, + Matrix initialCameraMatrix, + Vector initialDistortionCoefficients, + out Matrix cameraMatrix, + out Vector distortionCoefficients, + out Vector rotation, + out Vector translation, + bool silent = true) + { + // Compute an initial rotation and translation with DLT algorithm + DLT(initialCameraMatrix, initialDistortionCoefficients, worldPoints, imagePoints, out var rotationMatrix, out var initialTranslation); + var initialRotation = MatrixToAxisAngle(rotationMatrix); + + // pack parameters into vector + // parameters: fx, fy, cx, cy, k1, k2, + 3 for rotation, 3 translation = 12 + var initialParameters = Vector.Build.Dense(12); + int pi = 0; + initialParameters[pi++] = initialCameraMatrix[0, 0]; // fx + initialParameters[pi++] = initialCameraMatrix[1, 1]; // fy + initialParameters[pi++] = initialCameraMatrix[0, 2]; // cx + initialParameters[pi++] = initialCameraMatrix[1, 2]; // cy + initialParameters[pi++] = initialDistortionCoefficients[0]; // k1 + initialParameters[pi++] = initialDistortionCoefficients[1]; // k2 + initialParameters[pi++] = initialRotation[0]; + initialParameters[pi++] = initialRotation[1]; + initialParameters[pi++] = initialRotation[2]; + initialParameters[pi++] = initialTranslation[0]; + initialParameters[pi++] = initialTranslation[1]; + initialParameters[pi++] = initialTranslation[2]; + + var error = CalibrateCamera(worldPoints, imagePoints, initialParameters, true, out var computedParameters, silent); + + // unpack parameters into the outputs + cameraMatrix = Matrix.Build.Dense(3, 3); + distortionCoefficients = Vector.Build.Dense(2); + rotation = Vector.Build.Dense(3); + translation = Vector.Build.Dense(3); + + pi = 0; + cameraMatrix[0, 0] = computedParameters[pi++]; // fx + cameraMatrix[1, 1] = computedParameters[pi++]; // fy + cameraMatrix[0, 2] = computedParameters[pi++]; // cx + cameraMatrix[1, 2] = computedParameters[pi++]; // cy + distortionCoefficients[0] = computedParameters[pi++]; // k1 + distortionCoefficients[1] = computedParameters[pi++]; // k2 + rotation[0] = computedParameters[pi++]; + rotation[1] = computedParameters[pi++]; + rotation[2] = computedParameters[pi++]; + translation[0] = computedParameters[pi++]; + translation[1] = computedParameters[pi++]; + translation[2] = computedParameters[pi++]; + + return error; + } + /// /// Construct a new object /// computed from image width/height and focal length. @@ -24,7 +150,7 @@ public static class CalibrationExtensions /// The focal length in the X dimension. /// The focal length in the Y dimension. /// A newly computed . - public static ICameraIntrinsics ComputeCameraIntrinsics( + public static ICameraIntrinsics CreateCameraIntrinsics( int imageWidth, int imageHeight, double focalLengthX, @@ -47,9 +173,9 @@ public static class CalibrationExtensions /// The focal length in the X dimension. /// The focal length in the Y dimension. /// A newly computed . - public static ICameraIntrinsics ComputeCameraIntrinsics(this ImageBase image, double focalLengthX, double focalLengthY) + public static ICameraIntrinsics CreateCameraIntrinsics(this ImageBase image, double focalLengthX, double focalLengthY) { - return ComputeCameraIntrinsics(image.Width, image.Height, focalLengthX, focalLengthY); + return CreateCameraIntrinsics(image.Width, image.Height, focalLengthX, focalLengthY); } /// @@ -62,7 +188,7 @@ public static ICameraIntrinsics ComputeCameraIntrinsics(this ImageBase image, do public static Point3D? 
ProjectToCameraSpace(IDepthDeviceCalibrationInfo depthDeviceCalibrationInfo, Point2D point2D, Shared depthImage) { var colorExtrinsicsInverse = depthDeviceCalibrationInfo.ColorPose; - var pointInCameraSpace = depthDeviceCalibrationInfo.ColorIntrinsics.ToCameraSpace(point2D, 1.0, true); + var pointInCameraSpace = depthDeviceCalibrationInfo.ColorIntrinsics.GetCameraSpacePosition(point2D, 1.0, true); double x = pointInCameraSpace.X * colorExtrinsicsInverse[0, 0] + pointInCameraSpace.Y * colorExtrinsicsInverse[0, 1] + pointInCameraSpace.Z * colorExtrinsicsInverse[0, 2] + colorExtrinsicsInverse[0, 3]; double y = pointInCameraSpace.X * colorExtrinsicsInverse[1, 0] + pointInCameraSpace.Y * colorExtrinsicsInverse[1, 1] + pointInCameraSpace.Z * colorExtrinsicsInverse[1, 2] + colorExtrinsicsInverse[1, 3]; double z = pointInCameraSpace.X * colorExtrinsicsInverse[2, 0] + pointInCameraSpace.Y * colorExtrinsicsInverse[2, 1] + pointInCameraSpace.Z * colorExtrinsicsInverse[2, 2] + colorExtrinsicsInverse[2, 3]; @@ -176,22 +302,21 @@ public static Matrix AxisAngleToMatrix(Vector vectorRotation) /// /// Input rotation matrix. /// Same rotation in axis-angle representation (L2-Norm of the vector represents angular distance). - public static Vector MatrixToAxisAngle(Matrix m) + /// An optional angle epsilon parameter used to determine when the specified matrix contains a zero-rotation. + public static Vector MatrixToAxisAngle(Matrix m, double epsilon = 0.01 * Math.PI / 180) { if (m.RowCount != 3 || m.ColumnCount != 3) { throw new InvalidOperationException("The input must be a valid 3x3 rotation matrix in order to compute its axis-angle representation."); } - double epsilon = 0.01; - // theta = arccos((Trace(m) - 1) / 2) double angle = Math.Acos((m.Trace() - 1.0) / 2.0); // Create the axis vector. var v = Vector.Build.Dense(3, 0); - if (angle < epsilon) + if (double.IsNaN(angle) || angle < epsilon) { // If the angular distance to rotate is 0, we just return a vector of all zeroes. return v; @@ -202,7 +327,7 @@ public static Vector MatrixToAxisAngle(Matrix m) v[1] = m[0, 2] - m[2, 0]; v[2] = m[1, 0] - m[0, 1]; - if (v.L2Norm() < epsilon) + if (v.L2Norm() < 0.0001) { // if the axis to rotate around has 0 length, we are in a singularity where the angle has to be 180 degrees. angle = Math.PI; @@ -272,12 +397,44 @@ public static Vector MatrixToAxisAngle(Matrix m) return v.Normalize(2) * angle; } + /// + /// Project a 3D point (x, y, z) into a camera space (u, v) given the camera matrix and distortion coefficients. + /// The 3D point is *not* yet in the typically assumed \psi basis (X=Forward, Y=Left, Z=Up). + /// Instead, X and Y correspond to the image plane X and Y directions, with Z as depth. + /// + /// The camera matrix. + /// The distortion coefficients of the camera. + /// Input 3D point (X and Y correspond to image dimensions, with Z as depth). + /// Projected 2D point (output). 
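The Project helper documented just above implements the two-coefficient radial model; its body follows. Given camera matrix entries fx, fy, cx, cy and radial coefficients k1, k2, the point is first normalized as x' = X/Z, y' = Y/Z, then r² = x'² + y'², g = 1 + k1·r² + k2·r⁴, and finally u = fx·(x'·g) + cx, v = fy·(y'·g) + cy. As a worked example with fx = fy = 600, cx = 320, cy = 240, k1 = 0.1, k2 = 0, the point (X, Y, Z) = (0.5, 0.25, 2) gives x' = 0.25, y' = 0.125, r² = 0.078125, g ≈ 1.0078, and therefore (u, v) ≈ (471.2, 315.6).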
+ public static void Project(Matrix cameraMatrix, Vector distCoeffs, Point3D point, out Point2D projectedPoint) + { + double xp = point.X / point.Z; + double yp = point.Y / point.Z; + + double fx = cameraMatrix[0, 0]; + double fy = cameraMatrix[1, 1]; + double cx = cameraMatrix[0, 2]; + double cy = cameraMatrix[1, 2]; + double k1 = distCoeffs[0]; + double k2 = distCoeffs[1]; + + // compute f(xp, yp) + double rsquared = xp * xp + yp * yp; + double g = 1 + k1 * rsquared + k2 * rsquared * rsquared; + double xpp = xp * g; + double ypp = yp * g; + projectedPoint = new Point2D(fx * xpp + cx, fy * ypp + cy); + } + private static float GetMeshDepthAtPoint(ICameraIntrinsics depthIntrinsics, DepthImage depthImage, Point3D point, bool undistort) { - Point2D depthSpacePoint = depthIntrinsics.ToPixelSpace(point, undistort); + if (!depthIntrinsics.TryGetPixelPosition(point, undistort, out var depthPixel)) + { + return float.NaN; + } - int x = (int)Math.Round(depthSpacePoint.X); - int y = (int)Math.Round(depthSpacePoint.Y); + int x = (int)Math.Round(depthPixel.X); + int y = (int)Math.Round(depthPixel.Y); if ((x < 0) || (x >= depthImage.Width) || (y < 0) || (y >= depthImage.Height)) { return float.NaN; @@ -292,5 +449,256 @@ private static float GetMeshDepthAtPoint(ICameraIntrinsics depthIntrinsics, Dept return (float)depth / 1000; } + + private static double CalibrateCamera( + List worldPoints, + List imagePoints, + Vector initialParameters, + bool computeExtrinsics, + out Vector outputParameters, + bool silent = true) + { + int numValues = worldPoints.Count; + + // create a new vector for computing and returning our final parameters + var parametersCount = initialParameters.Count; + outputParameters = Vector.Build.Dense(parametersCount); + initialParameters.CopyTo(outputParameters); + + // This is the function that gets passed to the Levenberg-Marquardt optimizer + Vector OptimizationFunction(Vector p) + { + // initialize the error vector + var fvec = Vector.Build.Dense(numValues * 2); // each component (x,y) is a separate entry + + // unpack parameters + int pi = 0; + + // camera matrix + var k = Matrix.Build.DenseIdentity(3, 3); + k[0, 0] = p[pi++]; // fx + k[1, 1] = p[pi++]; // fy + k[0, 2] = p[pi++]; // cx + k[1, 2] = p[pi++]; // cy + + // distortion coefficients + var d = Vector.Build.Dense(2, 0); + d[0] = p[pi++]; // k1 + d[1] = p[pi++]; // k2 + + Matrix rotationMatrix = null; + Vector translationVector = null; + + if (computeExtrinsics) + { + // If we are computing extrinsics, that means the world points are not in local + // camera coordinates, so we need to also compute rotation and translation + var r = Vector.Build.Dense(3); + r[0] = p[pi++]; + r[1] = p[pi++]; + r[2] = p[pi++]; + rotationMatrix = AxisAngleToMatrix(r); + + translationVector = Vector.Build.Dense(3); + translationVector[0] = p[pi++]; + translationVector[1] = p[pi++]; + translationVector[2] = p[pi++]; + } + + int fveci = 0; + for (int i = 0; i < numValues; i++) + { + Point3D cameraPoint; + if (computeExtrinsics) + { + // transform world point to local camera coordinates + var x = rotationMatrix * worldPoints[i].ToVector(); + x += translationVector; + cameraPoint = new Point3D(x[0], x[1], x[2]); + } + else + { + // world points are already in local camera coordinates + cameraPoint = worldPoints[i]; + } + + // fvec_i = y_i - f(x_i) + Project(k, d, cameraPoint, out Point2D projectedPoint); + + var imagePoint = imagePoints[i]; + fvec[fveci++] = imagePoint.X - projectedPoint.X; + fvec[fveci++] = imagePoint.Y - projectedPoint.Y; + } + + 
return fvec; + } + + // optimize + var calibrate = new LevenbergMarquardt(OptimizationFunction); + while (calibrate.State == LevenbergMarquardt.States.Running) + { + var rmsError = calibrate.MinimizeOneStep(outputParameters); + if (!silent) + { + Console.WriteLine("rms error = " + rmsError); + } + } + + if (!silent) + { + for (int i = 0; i < parametersCount; i++) + { + Console.WriteLine(outputParameters[i] + "\t"); + } + + Console.WriteLine(); + } + + return calibrate.RMSError; + } + + // Use DLT to obtain estimate of calibration rig pose. + // This pose estimate will provide a good initial estimate for subsequent projector calibration. + // Note for a full PnP solution we should probably refine with Levenberg-Marquardt. + // DLT is described in Hartley and Zisserman p. 178 + private static void DLT(Matrix cameraMatrix, Vector distCoeffs, List worldPoints, List imagePoints, out Matrix rotationMatrix, out Vector translationVector) + { + int n = worldPoints.Count; + + var matrixA = Matrix.Build.Dense(2 * n, 12); + + for (int j = 0; j < n; j++) + { + var worldPoint = worldPoints[j]; + var imagePoint = imagePoints[j]; + + Undistort(cameraMatrix, distCoeffs, imagePoint, out Point2D undistortedPoint); + + int ii = 2 * j; + matrixA[ii, 4] = -worldPoint.X; + matrixA[ii, 5] = -worldPoint.Y; + matrixA[ii, 6] = -worldPoint.Z; + matrixA[ii, 7] = -1; + + matrixA[ii, 8] = undistortedPoint.Y * worldPoint.X; + matrixA[ii, 9] = undistortedPoint.Y * worldPoint.Y; + matrixA[ii, 10] = undistortedPoint.Y * worldPoint.Z; + matrixA[ii, 11] = undistortedPoint.Y; + + ii++; // next row + matrixA[ii, 0] = worldPoint.X; + matrixA[ii, 1] = worldPoint.Y; + matrixA[ii, 2] = worldPoint.Z; + matrixA[ii, 3] = 1; + + matrixA[ii, 8] = -undistortedPoint.X * worldPoint.X; + matrixA[ii, 9] = -undistortedPoint.X * worldPoint.Y; + matrixA[ii, 10] = -undistortedPoint.X * worldPoint.Z; + matrixA[ii, 11] = -undistortedPoint.X; + } + + // Find the eigenvector of ATA with the smallest eignvalue + var smallestEigenvector = Vector.Build.Dense(12); + var matrixATransposeA = matrixA.TransposeThisAndMultiply(matrixA); + matrixATransposeA.Evd().EigenVectors.Column(0).CopyTo(smallestEigenvector); + + // reshape into 3x4 projection matrix + var p = Matrix.Build.Dense(3, 4); + { + for (int i = 0; i < 3; i++) + { + for (int j = 0; j < 4; j++) + { + p[i, j] = smallestEigenvector[i * 4 + j]; + } + } + } + + rotationMatrix = Matrix.Build.Dense(3, 3); + for (int i = 0; i < 3; i++) + { + for (int j = 0; j < 3; j++) + { + rotationMatrix[i, j] = p[i, j]; + } + } + + if (rotationMatrix.Determinant() < 0) + { + rotationMatrix *= -1; + p *= -1; + } + + // orthogonalize R + { + var svd = rotationMatrix.Svd(); + rotationMatrix = svd.U * svd.VT; + } + + // determine scale factor + var rp = Matrix.Build.Dense(3, 3); + for (int i = 0; i < 3; i++) + { + for (int j = 0; j < 3; j++) + { + rp[i, j] = p[i, j]; + } + } + + double s = rp.L2Norm() / rotationMatrix.L2Norm(); + + translationVector = Vector.Build.Dense(3); + for (int i = 0; i < 3; i++) + { + translationVector[i] = p[i, 3]; + } + + translationVector *= 1.0 / s; + } + + private static void Undistort(Matrix cameraMatrix, Vector distCoeffs, Point2D pointIn, out Point2D pointOut) + { + float fx = (float)cameraMatrix[0, 0]; + float fy = (float)cameraMatrix[1, 1]; + float cx = (float)cameraMatrix[0, 2]; + float cy = (float)cameraMatrix[1, 2]; + float[] kappa = new float[] { (float)distCoeffs[0], (float)distCoeffs[1] }; + Undistort(fx, fy, cx, cy, kappa, pointIn, out pointOut); + } + + private static void 
Undistort(float fx, float fy, float cx, float cy, float[] kappa, Point2D pointIn, out Point2D pointOut) + { + // maps coords in undistorted image (xin, yin) to coords in distorted image (xout, yout) + double x = (pointIn.X - cx) / fx; + double y = (pointIn.Y - cy) / fy; // chances are you will need to flip y before passing in: imageHeight - yin + + // Newton Raphson + double ru = Math.Sqrt(x * x + y * y); + double rdest = ru; + double factor = 1.0; + + bool converged = false; + for (int j = 0; (j < 100) && !converged; j++) + { + double rdest2 = rdest * rdest; + double denom = 1.0; + double rk = 1.0; + + factor = 1.0; + for (int k = 0; k < 2; k++) + { + rk *= rdest2; + factor += kappa[k] * rk; + denom += (2.0 * k + 3.0) * kappa[k] * rk; + } + + double num = rdest * factor - ru; + rdest -= num / denom; + + converged = (num / denom) < 0.0001; + } + + pointOut = new Point2D(x / factor, y / factor); + } } } diff --git a/Sources/Calibration/Microsoft.Psi.Calibration/CameraIntrinsics.cs b/Sources/Calibration/Microsoft.Psi.Calibration/CameraIntrinsics.cs index 2e183f880..4ce80c235 100644 --- a/Sources/Calibration/Microsoft.Psi.Calibration/CameraIntrinsics.cs +++ b/Sources/Calibration/Microsoft.Psi.Calibration/CameraIntrinsics.cs @@ -11,13 +11,16 @@ namespace Microsoft.Psi.Calibration /// /// CameraIntrinsics defines the intrinsic properties for a given camera. /// - public class CameraIntrinsics : ICameraIntrinsics + public class CameraIntrinsics : ICameraIntrinsics, IEquatable { private Matrix transform; [OptionalField] private bool closedFormDistorts; + [OptionalField] + private DepthPixelSemantics depthPixelSemantics; + /// /// Initializes a new instance of the class. /// @@ -27,13 +30,15 @@ public class CameraIntrinsics : ICameraIntrinsics /// The radial distortion parameters (up to 6). /// The tangential distortion parameters (up to 2). /// Indicates which direction the closed form equation for Brown-Conrady Distortion model goes. I.e. does it perform distortion or undistortion. Default is to distort (thus making projection simpler and unprojection more complicated). + /// Defines how depth pixel values should be interpreted. public CameraIntrinsics( int imageWidth, int imageHeight, Matrix transform, Vector radialDistortion = null, Vector tangentialDistortion = null, - bool closedFormDistorts = true) + bool closedFormDistorts = true, + DepthPixelSemantics depthPixelSemantics = DepthPixelSemantics.DistanceToPlane) { this.ImageWidth = imageWidth; this.ImageHeight = imageHeight; @@ -59,6 +64,7 @@ public class CameraIntrinsics : ICameraIntrinsics this.FocalLengthXY = new Point2D(this.Transform[0, 0], this.Transform[1, 1]); this.PrincipalPoint = new Point2D(this.Transform[0, 2], this.Transform[1, 2]); this.ClosedFormDistorts = closedFormDistorts; + this.depthPixelSemantics = depthPixelSemantics; } /// @@ -108,47 +114,105 @@ private set } } + /// + /// Gets pixel semantics. + /// + public DepthPixelSemantics DepthPixelSemantics + { + get + { + return this.depthPixelSemantics; + } + + private set + { + this.depthPixelSemantics = value; + } + } + /// public int ImageWidth { get; private set; } /// public int ImageHeight { get; private set; } + /// + /// Returns a value indicating whether the specified camera intrinsics are the same. + /// + /// The first camera intrinsics. + /// The second camera intrinsics. + /// True if the camera intrinsics are the same; otherwise false. 
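The Newton–Raphson loop in the private Undistort helper above solves r·g(r) = r_in for r, where g(r) = 1 + k1·r² + k2·r⁴ is the radial factor and r_in is the radius of the normalized input point. Each iteration uses the derivative d/dr [r·g(r)] = 1 + 3·k1·r² + 5·k2·r⁴ (the denom term in the loop) and updates r ← r − (r·g(r) − r_in) / denom, stopping when the update falls below 1e-4 or after 100 iterations; the normalized point is then divided by the final g value to produce the output.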
+ public static bool operator ==(CameraIntrinsics left, CameraIntrinsics right) + { + return left.Equals(right); + } + + /// + /// Returns a value indicating whether the specified camera intrinsics are different. + /// + /// The first camera intrinsics. + /// The second camera intrinsics. + /// True if camera intrinsics are different; otherwise false. + public static bool operator !=(CameraIntrinsics left, CameraIntrinsics right) + { + return !left.Equals(right); + } + /// - public Point2D ToPixelSpace(Point3D pt, bool distort) + public Point2D? GetPixelPosition(Point3D point3D, bool distort, bool nullIfOutsideFieldOfView = true) { // X points in the depth dimension. Y points to the left, and Z points up. - Point2D pixelPt = new Point2D(-pt.Y / pt.X, -pt.Z / pt.X); + var point2D = new Point2D(-point3D.Y / point3D.X, -point3D.Z / point3D.X); if (distort) { - this.DistortPoint(pixelPt, out pixelPt); + this.TryDistortPoint(point2D, out point2D); } - Point3D tmp = new Point3D(pixelPt.X, pixelPt.Y, 1.0); + var tmp = new Point3D(point2D.X, point2D.Y, 1.0); tmp = tmp.TransformBy(this.transform); + + if (nullIfOutsideFieldOfView && (tmp.X < 0 || this.ImageWidth <= tmp.X || tmp.Y < 0 || this.ImageHeight <= tmp.Y)) + { + return null; + } + return new Point2D(tmp.X, tmp.Y); } + /// + public bool TryGetPixelPosition(Point3D point3D, bool distort, out Point2D pixelPosition, bool nullIfOutsideFieldOfView = true) + { + var point2D = this.GetPixelPosition(point3D, distort, nullIfOutsideFieldOfView); + pixelPosition = point2D.HasValue ? point2D.Value : default; + return point2D.HasValue; + } + /// - public Point3D ToCameraSpace(Point2D pt, double depth, bool undistort) + public Point3D GetCameraSpacePosition(Point2D point2D, double depth, bool undistort) { // Convert from pixel coordinates to NDC - Point3D tmp = new Point3D(pt.X, pt.Y, 1.0); + var tmp = new Point3D(point2D.X, point2D.Y, 1.0); tmp = tmp.TransformBy(this.InvTransform); // Distort the pixel - Point2D pixelPt = new Point2D(tmp.X, tmp.Y); + var pixelPoint2D = new Point2D(tmp.X, tmp.Y); if (undistort) { - this.UndistortPoint(pixelPt, out pixelPt); + this.TryUndistortPoint(pixelPoint2D, out pixelPoint2D); + } + + if (this.depthPixelSemantics == DepthPixelSemantics.DistanceToPoint) + { + double norm = Math.Sqrt(pixelPoint2D.X * pixelPoint2D.X + pixelPoint2D.Y * pixelPoint2D.Y + 1); + depth /= norm; } // X points in the depth dimension. Y points to the left, and Z points up. 
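A minimal usage sketch of the reworked projection API above, GetPixelPosition and TryGetPixelPosition (hypothetical point values; assumes the \psi basis X=Forward, Y=Left, Z=Up):

    using MathNet.Spatial.Euclidean;
    using Microsoft.Psi.Calibration;

    static void ProjectExample(ICameraIntrinsics intrinsics)
    {
        // a point 2 m in front of the camera, slightly to the left and below the optical axis
        var point3D = new Point3D(2.0, 0.1, -0.05);

        // nullable form: returns null when the projected pixel falls outside the image
        Point2D? pixel = intrinsics.GetPixelPosition(point3D, distort: true);

        // Try form: the same field-of-view check, expressed as a bool plus an out parameter
        if (intrinsics.TryGetPixelPosition(point3D, true, out Point2D pixelPosition))
        {
            // pixelPosition is valid and inside the field of view here
        }
    }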
- return new Point3D(depth, -pixelPt.X * depth, -pixelPt.Y * depth); + return new Point3D(depth, -pixelPoint2D.X * depth, -pixelPoint2D.Y * depth); } /// - public bool UndistortPoint(Point2D distortedPt, out Point2D undistortedPt) + public bool TryUndistortPoint(Point2D distortedPt, out Point2D undistortedPt) { if (this.ClosedFormDistorts) { @@ -159,7 +223,7 @@ public bool UndistortPoint(Point2D distortedPt, out Point2D undistortedPt) } /// - public bool DistortPoint(Point2D undistortedPt, out Point2D distortedPt) + public bool TryDistortPoint(Point2D undistortedPt, out Point2D distortedPt) { if (this.ClosedFormDistorts) { @@ -169,6 +233,75 @@ public bool DistortPoint(Point2D undistortedPt, out Point2D distortedPt) return this.InverseOfClosedForm(undistortedPt, out distortedPt); } + /// + public Point3D[,] GetPixelToCameraSpaceMapping(bool undistort) + { + var result = new Point3D[this.ImageWidth, this.ImageHeight]; + for (int i = 0; i < this.ImageWidth; i++) + { + for (int j = 0; j < this.ImageHeight; j++) + { + // Convert from pixel coordinates to NDC + var tmp = new Point3D(i, j, 1.0); + tmp = tmp.TransformBy(this.InvTransform); + + // Distort the pixel + var pixelPoint2D = new Point2D(tmp.X, tmp.Y); + if (undistort) + { + this.TryUndistortPoint(pixelPoint2D, out pixelPoint2D); + } + + if (this.depthPixelSemantics == DepthPixelSemantics.DistanceToPoint) + { + double norm = Math.Sqrt(pixelPoint2D.X * pixelPoint2D.X + pixelPoint2D.Y * pixelPoint2D.Y + 1); + result[i, j] = new Point3D(1 / norm, -pixelPoint2D.X / norm, -pixelPoint2D.Y / norm); + } + else + { + result[i, j] = new Point3D(1, -pixelPoint2D.X, -pixelPoint2D.Y); + } + } + } + + return result; + } + + /// + public override int GetHashCode() + { + var hashCode = default(HashCode); + hashCode.Add(this.depthPixelSemantics); + hashCode.Add(this.ClosedFormDistorts); + hashCode.Add(this.FocalLengthXY); + hashCode.Add(this.ImageHeight); + hashCode.Add(this.ImageWidth); + hashCode.Add(this.PrincipalPoint); + hashCode.Add(this.RadialDistortion); + hashCode.Add(this.TangentialDistortion); + hashCode.Add(this.Transform); + return hashCode.ToHashCode(); + } + + /// + public override bool Equals(object obj) => obj is CameraIntrinsics other && this.Equals(other); + + /// + public bool Equals(ICameraIntrinsics other) => + other is CameraIntrinsics cameraIntrinsics && + Equals(this.depthPixelSemantics, cameraIntrinsics.depthPixelSemantics) && + Equals(this.ClosedFormDistorts, cameraIntrinsics.ClosedFormDistorts) && + Equals(this.FocalLengthXY, cameraIntrinsics.FocalLengthXY) && + Equals(this.ImageHeight, cameraIntrinsics.ImageHeight) && + Equals(this.ImageWidth, cameraIntrinsics.ImageWidth) && + Equals(this.PrincipalPoint, cameraIntrinsics.PrincipalPoint) && + Equals(this.RadialDistortion, cameraIntrinsics.RadialDistortion) && + Equals(this.TangentialDistortion, cameraIntrinsics.TangentialDistortion) && + Equals(this.Transform, cameraIntrinsics.Transform); + + /// + public bool Equals(CameraIntrinsics other) => this.Equals((ICameraIntrinsics)other); + private bool InverseOfClosedForm(Point2D inputPt, out Point2D outputPt) { double k1 = this.RadialDistortion[0]; @@ -267,8 +400,8 @@ private bool InverseOfClosedForm(Point2D inputPt, out Point2D outputPt) } // Update our new guess (i.e. 
x = x - J(F(x))^-1 * F(x)) - x = x - ((dFydy * errx) - (dFxdy * erry)) / det; - y = y - ((-dFydx * errx) + (dFxdx * erry)) / det; + x -= ((dFydy * errx) - (dFxdy * erry)) / det; + y -= ((-dFydx * errx) + (dFxdx * erry)) / det; #pragma warning restore SA1305 } diff --git a/Sources/Calibration/Microsoft.Psi.Calibration/DepthDeviceCalibrationInfo.cs b/Sources/Calibration/Microsoft.Psi.Calibration/DepthDeviceCalibrationInfo.cs index d88ab6d24..e71defd6b 100644 --- a/Sources/Calibration/Microsoft.Psi.Calibration/DepthDeviceCalibrationInfo.cs +++ b/Sources/Calibration/Microsoft.Psi.Calibration/DepthDeviceCalibrationInfo.cs @@ -92,13 +92,21 @@ public DepthDeviceCalibrationInfo() public ICameraIntrinsics DepthIntrinsics { get; } /// - public Point2D ToColorSpace(Point3D point3D) + public Point2D? GetPixelPosition(Point3D point3D, bool nullIfOutsideFieldOfView = true) { // First convert the point into camera coordinates. var point3DInColorCamera = this.ColorExtrinsics.Transform(point3D); // Then convert to pixel space. - return this.ColorIntrinsics.ToPixelSpace(point3DInColorCamera, true); + return this.ColorIntrinsics.GetPixelPosition(point3DInColorCamera, true, nullIfOutsideFieldOfView); + } + + /// + public bool TryGetPixelPosition(Point3D point3D, out Point2D pixelPosition, bool nullIfOutsideFieldOfView = true) + { + var point2D = this.GetPixelPosition(point3D, nullIfOutsideFieldOfView); + pixelPosition = point2D.HasValue ? point2D.Value : default; + return point2D.HasValue; } } } diff --git a/Sources/Calibration/Microsoft.Psi.Calibration/DepthPixelSemantics.cs b/Sources/Calibration/Microsoft.Psi.Calibration/DepthPixelSemantics.cs new file mode 100644 index 000000000..fb6715e05 --- /dev/null +++ b/Sources/Calibration/Microsoft.Psi.Calibration/DepthPixelSemantics.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Calibration +{ + /// + /// Defines how depth pixel values should be interpreted. + /// + public enum DepthPixelSemantics + { + /// + /// The depth value indicates the distance to a plane perpendicular + /// to the camera's pointing direction. + /// + DistanceToPlane = 0, + + /// + /// The depth value indicates the euclidean distance directly to + /// the point in space. + /// + DistanceToPoint = 1, + } +} diff --git a/Sources/Calibration/Microsoft.Psi.Calibration/ICameraIntrinsics.cs b/Sources/Calibration/Microsoft.Psi.Calibration/ICameraIntrinsics.cs index 831bef178..ac660f29c 100644 --- a/Sources/Calibration/Microsoft.Psi.Calibration/ICameraIntrinsics.cs +++ b/Sources/Calibration/Microsoft.Psi.Calibration/ICameraIntrinsics.cs @@ -3,6 +3,7 @@ namespace Microsoft.Psi.Calibration { + using System; using MathNet.Numerics.LinearAlgebra; using MathNet.Spatial.Euclidean; @@ -11,7 +12,7 @@ namespace Microsoft.Psi.Calibration /// for a camera (i.e. converting from camera space coordinates into pixel /// coordinates). /// - public interface ICameraIntrinsics + public interface ICameraIntrinsics : IEquatable { /// /// Gets the intrinsics matrix. This transform converts camera coordinates (in the camera's local space) into @@ -69,21 +70,43 @@ public interface ICameraIntrinsics int ImageHeight { get; } /// - /// Projects a 3D point into the pixel space. + /// Gets the corresponding pixel position for a point in 3D space. /// /// Point in 3D space, assuming MathNet basis (Forward=X, Left=Y, Up=Z). /// Indicates whether to apply distortion. - /// Point in pixel space. 
- Point2D ToPixelSpace(Point3D point3D, bool distort); + /// Optional flag indicating whether to return null if point is outside the field of view (default true). + /// Point containing the pixel position. + Point2D? GetPixelPosition(Point3D point3D, bool distort, bool nullIfOutsideFieldOfView = true); /// - /// Unprojects a point from pixel space into 3D space. + /// Gets the corresponding pixel position for a point in 3D space. /// - /// Point in pixel space. - /// Depth at pixel. + /// Point in 3D space, assuming MathNet basis (Forward=X, Left=Y, Up=Z). + /// Indicates whether to apply distortion. + /// Output point containing the pixel position. + /// Optional flag indicating whether to return null if point is outside the field of view (default true). + /// True if is within field of view, otherwise false. + bool TryGetPixelPosition(Point3D point3D, bool distort, out Point2D pixelPosition, bool nullIfOutsideFieldOfView = true); + + /// + /// Gets the corresponding 3D camera space position at a given depth along a specified pixel. + /// + /// The pixel position. + /// The depth along the specified pixel position. /// Indicates whether to apply undistortion. /// Point in 3D space, assuming MathNet basis (Forward=X, Left=Y, Up=Z). - Point3D ToCameraSpace(Point2D point2D, double depth, bool undistort); + Point3D GetCameraSpacePosition(Point2D point2D, double depth, bool undistort); + + /// + /// Gets a mapping matrix that can be used to transform pixels into 3D space. + /// + /// Indicates whether to apply undistortion. + /// + /// A matrix of 3D points that can be used to transform depth values at a specified pixel + /// into 3D space. To use this matrix simply piecewise multiply the depth value by the X + /// Y and Z dimensions of the in the matrix at the location indexed + /// by the pixel. + public Point3D[,] GetPixelToCameraSpaceMapping(bool undistort); /// /// Applies the distortion model to a point in the camera post-projection coordinates. @@ -101,10 +124,10 @@ public interface ICameraIntrinsics /// Newton's method is used to find the inverse of this. That is /// Xd(n+1) = Xd(n) + J^-1 * F(Xd,Yd). /// - /// The undistorted point in camera post-projection coordinates. - /// The distorted point. - /// True if 'distortedPt' contains the distorted point, or false if the algorithm did not converge. - bool DistortPoint(Point2D undistortedPt, out Point2D distortedPt); + /// The undistorted point in camera post-projection coordinates. + /// The distorted point. + /// True if contains the distorted point, or false if the algorithm did not converge. + bool TryDistortPoint(Point2D undistortedPoint, out Point2D distortedPoint); /// /// Applies the camera's radial and tangential undistortion to the specified (distorted) point. @@ -119,9 +142,9 @@ public interface ICameraIntrinsics /// T0,T1 - tangential distortion coefficients. /// /// - /// Distorted point in camera post-projection coordinates. - /// Returns the undistorted point in camera post-projection coordinates. - /// True if 'undistortedPoint' contains the undistorted point, or false if the algorithm did not converge. - bool UndistortPoint(Point2D distortedPt, out Point2D undistortedPt); + /// Distorted point in camera post-projection coordinates. + /// Returns the undistorted point in camera post-projection coordinates. + /// True if contains the undistorted point, or false if the algorithm did not converge. 
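The GetPixelToCameraSpaceMapping method documented above returns, per pixel, the camera-space point at unit depth; multiplying that point piecewise by a depth value yields the 3D point for the pixel, per its documentation. A minimal sketch (hypothetical pixel coordinates and depth value; millimeter depth units assumed, as elsewhere in the calibration code):

    using MathNet.Spatial.Euclidean;
    using Microsoft.Psi.Calibration;

    static Point3D UnprojectPixel(ICameraIntrinsics intrinsics, int u, int v, ushort depthMillimeters)
    {
        // each entry is the camera-space point at depth 1 for that pixel
        // (in practice this map would be computed once and cached)
        Point3D[,] mapping = intrinsics.GetPixelToCameraSpaceMapping(undistort: true);

        var ray = mapping[u, v];
        var depth = depthMillimeters / 1000.0; // meters

        // piecewise multiply the unit-depth point by the depth value
        return new Point3D(ray.X * depth, ray.Y * depth, ray.Z * depth);
    }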
+ bool TryUndistortPoint(Point2D distortedPoint, out Point2D undistortedPoint); } } diff --git a/Sources/Calibration/Microsoft.Psi.Calibration/IDepthDeviceCalibrationInfo.cs b/Sources/Calibration/Microsoft.Psi.Calibration/IDepthDeviceCalibrationInfo.cs index e52d1e3ae..0612db70d 100644 --- a/Sources/Calibration/Microsoft.Psi.Calibration/IDepthDeviceCalibrationInfo.cs +++ b/Sources/Calibration/Microsoft.Psi.Calibration/IDepthDeviceCalibrationInfo.cs @@ -44,7 +44,17 @@ public interface IDepthDeviceCalibrationInfo /// Converts a 3D point from depth camera coordinates into color image coordinates. /// /// The 3D point in depth camera coordinates. + /// Optional flag indicating whether to return null if point is outside the field of view (default true). /// The 2D point in color image space. - Point2D ToColorSpace(Point3D point3D); + Point2D? GetPixelPosition(Point3D point3D, bool nullIfOutsideFieldOfView = true); + + /// + /// Converts a 3D point from depth camera coordinates into color image coordinates. + /// + /// The 3D point in depth camera coordinates. + /// Output point containing the pixel position. + /// Optional flag indicating whether to return null if point is outside the field of view (default true). + /// True if is within field of view, otherwise false. + bool TryGetPixelPosition(Point3D point3D, out Point2D pixelPosition, bool nullIfOutsideFieldOfView = true); } } diff --git a/Sources/Calibration/Microsoft.Psi.Calibration/Microsoft.Psi.Calibration.csproj b/Sources/Calibration/Microsoft.Psi.Calibration/Microsoft.Psi.Calibration.csproj index 1dc01080a..6137dab33 100644 --- a/Sources/Calibration/Microsoft.Psi.Calibration/Microsoft.Psi.Calibration.csproj +++ b/Sources/Calibration/Microsoft.Psi.Calibration/Microsoft.Psi.Calibration.csproj @@ -32,6 +32,7 @@ + diff --git a/Sources/Calibration/Test.Psi.Calibration/DistortionTests.cs b/Sources/Calibration/Test.Psi.Calibration/DistortionTests.cs index 4bb3513c1..fb2d0bcad 100644 --- a/Sources/Calibration/Test.Psi.Calibration/DistortionTests.cs +++ b/Sources/Calibration/Test.Psi.Calibration/DistortionTests.cs @@ -66,6 +66,7 @@ public void TestDistortion() unsafe { byte* row = (byte*)img.ImageData.ToPointer(); + var bytesPerPixel = img.BitsPerPixel / 8; for (int i = 0; i < img.Height; i++) { byte* col = row; @@ -84,7 +85,7 @@ public void TestDistortion() col[2] = (byte)(255.0f * (1.0f - (float)i / (float)img.Height)); } - col += img.BitsPerPixel / 8; + col += bytesPerPixel; } row += img.Stride; @@ -158,6 +159,8 @@ public void TestDistortion() unsafe { byte* dstrow = (byte*)distortedImage.ImageData.ToPointer(); + var imgBytesPerPixel = img.BitsPerPixel / 8; + var distortedImageBytesPerPixel = distortedImage.BitsPerPixel / 8; for (int i = 0; i < distortedImage.Height; i++) { byte* dstcol = dstrow; @@ -168,19 +171,19 @@ public void TestDistortion() ((float)i - ci.PrincipalPoint.Y) / ci.FocalLengthXY.Y); Point2D undistortedPoint; - bool converged = ci.UndistortPoint(pixelCoord, out undistortedPoint); + bool converged = ci.TryUndistortPoint(pixelCoord, out undistortedPoint); int px = (int)(undistortedPoint.X * ci.FocalLengthXY.X + ci.PrincipalPoint.X); int py = (int)(undistortedPoint.Y * ci.FocalLengthXY.Y + ci.PrincipalPoint.Y); if (converged && px >= 0 && px < img.Width && py >= 0 && py < img.Height) { - byte* src = (byte*)img.ImageData.ToPointer() + py * img.Stride + px * img.BitsPerPixel / 8; + byte* src = (byte*)img.ImageData.ToPointer() + py * img.Stride + px * imgBytesPerPixel; dstcol[0] = src[0]; dstcol[1] = src[1]; dstcol[2] = src[2]; 
} - dstcol += distortedImage.BitsPerPixel / 8; + dstcol += distortedImageBytesPerPixel; } dstrow += distortedImage.Stride; @@ -197,6 +200,8 @@ public void TestDistortion() double err = 0.0; int numPts = 0; byte* dstrow = (byte*)undistortedImage.ImageData.ToPointer(); + var imgBytesPerPixel = img.BitsPerPixel / 8; + var undistortedImageBytesPerPixel = undistortedImage.BitsPerPixel / 8; for (int i = 0; i < undistortedImage.Height; i++) { byte* dstcol = dstrow; @@ -206,18 +211,18 @@ public void TestDistortion() ((float)j - ci.PrincipalPoint.X) / ci.FocalLengthXY.X, ((float)i - ci.PrincipalPoint.Y) / ci.FocalLengthXY.Y); MathNet.Spatial.Euclidean.Point2D distortedPixelCoord, undistortedPixelCoord; - ci.DistortPoint(pixelCoord, out distortedPixelCoord); - bool converged = ci.UndistortPoint(distortedPixelCoord, out undistortedPixelCoord); + ci.TryDistortPoint(pixelCoord, out distortedPixelCoord); + bool converged = ci.TryUndistortPoint(distortedPixelCoord, out undistortedPixelCoord); int px = (int)(undistortedPixelCoord.X * ci.FocalLengthXY.X + ci.PrincipalPoint.X); int py = (int)(undistortedPixelCoord.Y * ci.FocalLengthXY.Y + ci.PrincipalPoint.Y); if (converged && px >= 0 && px < img.Width && py >= 0 && py < img.Height) { - byte* src = (byte*)img.ImageData.ToPointer() + py * img.Stride + px * img.BitsPerPixel / 8; + byte* src = (byte*)img.ImageData.ToPointer() + py * img.Stride + px * imgBytesPerPixel; dstcol[0] = src[0]; dstcol[1] = src[1]; dstcol[2] = src[2]; - byte* src2 = (byte*)img.ImageData.ToPointer() + i * img.Stride + j * img.BitsPerPixel / 8; + byte* src2 = (byte*)img.ImageData.ToPointer() + i * img.Stride + j * imgBytesPerPixel; double dx = (double)src2[0] - (double)src[0]; double dy = (double)src2[1] - (double)src[1]; double dz = (double)src2[2] - (double)src[2]; @@ -225,7 +230,7 @@ public void TestDistortion() numPts++; } - dstcol += undistortedImage.BitsPerPixel / 8; + dstcol += undistortedImageBytesPerPixel; } dstrow += undistortedImage.Stride; diff --git a/Sources/Calibration/Test.Psi.Calibration/Test.Psi.Calibration.csproj b/Sources/Calibration/Test.Psi.Calibration/Test.Psi.Calibration.csproj index 7af970a68..81f93c2f2 100644 --- a/Sources/Calibration/Test.Psi.Calibration/Test.Psi.Calibration.csproj +++ b/Sources/Calibration/Test.Psi.Calibration/Test.Psi.Calibration.csproj @@ -30,7 +30,7 @@ - + diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/Annotation.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/Annotation.cs deleted file mode 100644 index 9eb6cd264..000000000 --- a/Sources/Data/Microsoft.Psi.Data/Annotations/Annotation.cs +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. - -namespace Microsoft.Psi.Data.Annotations -{ - using System.Collections.Generic; - using System.Diagnostics; - - /// - /// Represents an annotation instance. - /// - public class Annotation - { - /// - /// Gets or sets the collection of values in the annotation. - /// - public Dictionary Values { get; set; } - } -} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationAttributeSchema.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationAttributeSchema.cs new file mode 100644 index 000000000..69b4c2607 --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationAttributeSchema.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
+ +namespace Microsoft.Psi.Data.Annotations +{ + using System.Collections.Generic; + + /// + /// Represents an annotation attribute schema. + /// + public class AnnotationAttributeSchema + { + /// + /// Initializes a new instance of the class. + /// + /// The annotation attribute name. + /// The annotation attribute description. + /// The annotation value schema for this attribute. + public AnnotationAttributeSchema(string name, string description, IAnnotationValueSchema valueSchema) + { + this.Name = name; + this.Description = description; + this.ValueSchema = valueSchema; + } + + /// + /// Gets the name of the annotation attribute. + /// + public string Name { get; } + + /// + /// Gets a description of the annotation attribute. + /// + public string Description { get; } + + /// + /// Gets the annotation value schema for this annotation attribute. + /// + public IAnnotationValueSchema ValueSchema { get; } + + /// + /// Creates an attribute value from a specified string. + /// + /// The attribute value as a string. + /// An attribute value which can be used to populate a time interval annotation. + public Dictionary CreateAttribute(string value) + => new () { { this.Name, this.ValueSchema.CreateAnnotationValue(value) } }; + } +} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationDefinition.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationDefinition.cs deleted file mode 100644 index 0357be3a9..000000000 --- a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationDefinition.cs +++ /dev/null @@ -1,153 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. - -namespace Microsoft.Psi.Data.Annotations -{ - using System; - using System.Collections.Generic; - using System.IO; - using System.Linq; - using System.Reflection; - using Microsoft.Psi.Data.Converters; - using Microsoft.Psi.Data.Helpers; - using Newtonsoft.Json; - using Newtonsoft.Json.Converters; - - /// - /// Represents a definition of an enumeration. - /// - public class AnnotationDefinition - { - private static JsonSerializerSettings jsonSerializerSettings = new JsonSerializerSettings() - { - Formatting = Formatting.Indented, - NullValueHandling = NullValueHandling.Ignore, - TypeNameHandling = TypeNameHandling.Auto, - TypeNameAssemblyFormatHandling = TypeNameAssemblyFormatHandling.Simple, - Converters = { new StringEnumConverter() }, - SerializationBinder = new SafeSerializationBinder(), - }; - - /// - /// Initializes a new instance of the class. - /// - /// The name of the annotation definition. - public AnnotationDefinition(string name) - { - this.Name = name; - this.SchemaDefinitions = new List(); - } - - /// - /// Gets the collection of schema definitions in the annotation definition. - /// - public List SchemaDefinitions { get; private set; } - - /// - /// Gets the name of the annotation definition. - /// - public string Name { get; private set; } - - /// - /// Loads an annotation definition from disk. - /// - /// The full path and filename of the annotation definition to load. - /// The requested annotation definition if it exists, otherwise null. 
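A minimal sketch of the new AnnotationAttributeSchema above (hypothetical attribute name and description; the valueSchema parameter stands in for any IAnnotationValueSchema implementation, such as the string or enumerable value schemas added elsewhere in this patch, and CreateAttribute is assumed to return a Dictionary<string, IAnnotationValue> keyed by the attribute name, as its body suggests):

    using System.Collections.Generic;
    using Microsoft.Psi.Data.Annotations;

    static Dictionary<string, IAnnotationValue> BuildConfidenceAttribute(IAnnotationValueSchema valueSchema)
    {
        var attributeSchema = new AnnotationAttributeSchema(
            "Confidence",
            "How confident the annotator is in the label.",
            valueSchema);

        // parses the string through the value schema and returns a
        // single-entry dictionary keyed by the attribute name
        return attributeSchema.CreateAttribute("High");
    }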
- public static AnnotationDefinition Load(string fileName) - { - if (!File.Exists(fileName)) - { - return null; - } - - try - { - using (StreamReader streamReader = new StreamReader(fileName)) - { - JsonReader reader = new JsonTextReader(streamReader); - JsonSerializer serializer = JsonSerializer.Create(jsonSerializerSettings); - return serializer.Deserialize(reader); - } - } - catch (Exception) - { - return null; - } - } - - /// - /// Gets a value indicating whether this annotation definition contains an annotation schema definition with a specified name. - /// - /// The name of the schem a definition to search for. - /// True if the annotation difinition contains an annotation schema definition with the specified name, otherwise returns false. - public bool ContainsSchemaDefinition(string schemaDefinitionName) - { - return this.SchemaDefinitions.Exists(s => s.Name == schemaDefinitionName); - } - - /// - /// Gets a specified annotation schema definition. - /// - /// The name of the schem a definition to search for. - /// The requested annotation schema definition if it exists in this annotation definition, otherwise null. - public AnnotationSchemaDefinition GetSchemaDefinition(string schemaDefinitionName) - { - return this.SchemaDefinitions.FirstOrDefault(s => s.Name == schemaDefinitionName); - } - - /// - /// Adds a new annotation schema definition to this annotation definition. - /// - /// The annotation schema definition to add to the collection. - public void AddSchemaDefinition(AnnotationSchemaDefinition schemaDefinition) - { - if (this.ContainsSchemaDefinition(schemaDefinition.Name)) - { - throw new ApplicationException(string.Format("AnnotationDefinition {0} already contains a schema definition named {1}.", this.Name, schemaDefinition.Name)); - } - - this.SchemaDefinitions.Add(schemaDefinition); - } - - /// - /// Creates a new time interval annotation instance based on this annotation definition. - /// - /// The annotation's interval. - /// A new time interval annotation. - public TimeIntervalAnnotation CreateTimeIntervalAnnotation(TimeInterval timeInterval) - { - // Create the collection of initial values for the annotation - // based on the default values of each schema in the definition. - Dictionary values = new Dictionary(); - foreach (AnnotationSchemaDefinition schemaDefinition in this.SchemaDefinitions) - { - MethodInfo defaultValueProperty = schemaDefinition.Schema.GetType().GetProperty("DefaultValue").GetGetMethod(false); - values[schemaDefinition.Name] = defaultValueProperty.Invoke(schemaDefinition.Schema, new object[] { }); - } - - return new TimeIntervalAnnotation(timeInterval, values); - } - - /// - /// Saves this annotation definition to disk. - /// - /// The full path and filename of the location to save this annotation definition. - public void Save(string fileName) - { - StreamWriter jsonFile = null; - try - { - jsonFile = File.CreateText(fileName); - using (var jsonWriter = new JsonTextWriter(jsonFile)) - { - JsonSerializer serializer = JsonSerializer.Create(jsonSerializerSettings); - serializer.Serialize(jsonWriter, this); - } - } - finally - { - jsonFile?.Dispose(); - } - } - } -} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchema.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchema.cs new file mode 100644 index 000000000..5264f0b6a --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchema.cs @@ -0,0 +1,199 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT license. + +namespace Microsoft.Psi.Data.Annotations +{ + using System; + using System.Collections.Generic; + using System.IO; + using System.Linq; + using Microsoft.Psi.Data.Helpers; + using Newtonsoft.Json; + using Newtonsoft.Json.Converters; + + /// + /// Represents an annotation schema. + /// + public class AnnotationSchema + { + private static readonly JsonSerializerSettings JsonSerializerSettings = new () + { + Formatting = Formatting.Indented, + NullValueHandling = NullValueHandling.Ignore, + TypeNameHandling = TypeNameHandling.Auto, + TypeNameAssemblyFormatHandling = TypeNameAssemblyFormatHandling.Simple, + Converters = { new StringEnumConverter() }, + SerializationBinder = new SafeSerializationBinder(), + }; + + /// + /// Initializes a new instance of the class. + /// + /// The name of the annotation schema. + public AnnotationSchema(string name) + { + this.Name = name; + this.AttributeSchemas = new List(); + } + + /// + /// Gets the name of the annotation schema. + /// + public string Name { get; private set; } + + /// + /// Gets the collection of attribute schemas. + /// + public List AttributeSchemas { get; private set; } + + /// + /// Loads an annotation schema from disk. + /// + /// The full path and filename of the annotation schema to load. + /// The requested annotation schema if it exists, otherwise null. + public static AnnotationSchema Load(string fileName) + { + if (!File.Exists(fileName)) + { + return null; + } + + try + { + using var streamReader = new StreamReader(fileName); + return Load(streamReader); + } + catch (Exception) + { + return null; + } + } + + /// + /// Loads an annotation schema from disk. + /// + /// The full path and filename of the annotation schema to load. + /// The requested annotation schema if it exists, otherwise null. + public static AnnotationSchema LoadOrDefault(string fileName) + { + if (!File.Exists(fileName)) + { + return null; + } + + try + { + using var streamReader = new StreamReader(fileName); + JsonReader reader = new JsonTextReader(streamReader); + JsonSerializer serializer = JsonSerializer.Create(JsonSerializerSettings); + var annotationSchema = serializer.Deserialize(reader); + + // Perform simple deserialization checks + if (string.IsNullOrEmpty(annotationSchema.Name)) + { + throw new Exception("Deserialized annotation schema has empty name."); + } + else if (annotationSchema.AttributeSchemas.Count == 0) + { + throw new Exception("Deserialized annotation schema has no attributes."); + } + else if (annotationSchema.AttributeSchemas.Any(s => string.IsNullOrEmpty(s.Name))) + { + throw new Exception("Deserialized annotation schema which contains attributes with no names specified."); + } + + return annotationSchema; + } + catch (Exception) + { + return null; + } + } + + /// + /// Gets a value indicating whether this annotation schema contains a specified attribute. + /// + /// The name of the attribute. + /// True if the annotation schema contains the specified attribute, otherwise false. + public bool ContainsAttribute(string attributeName) + => this.AttributeSchemas.Any(ad => ad.Name == attributeName); + + /// + /// Gets the schema for a specified attribute. + /// + /// The name of the attribute. + /// The schema for a specified attribute if the attribute exists, otherwise null. + public AnnotationAttributeSchema GetAttributeSchema(string attributeName) + => this.AttributeSchemas.FirstOrDefault(ad => ad.Name == attributeName); + + /// + /// Adds a new attribute to this annotation schema. 
+ /// + /// The attribute schema to add. + public void AddAttributeSchema(AnnotationAttributeSchema attributeSchema) + { + if (this.ContainsAttribute(attributeSchema.Name)) + { + throw new ApplicationException(string.Format("AnnotationSchema {0} already contains an attribute named {1}.", this.Name, attributeSchema.Name)); + } + + this.AttributeSchemas.Add(attributeSchema); + } + + /// + /// Creates a new time interval annotation instance on a specified track, based on this annotation schema. + /// + /// The time interval. + /// The track. + /// A new time interval annotation. + public TimeIntervalAnnotation CreateDefaultTimeIntervalAnnotation(TimeInterval timeInterval, string track) + { + // Create the collection of initial values for the annotation based on the default values + var values = new Dictionary(); + foreach (var attributeSchema in this.AttributeSchemas) + { + values[attributeSchema.Name] = attributeSchema.ValueSchema.GetDefaultAnnotationValue(); + } + + return new TimeIntervalAnnotation(timeInterval, track, values); + } + + /// + /// Saves this annotation schema to a specified file. + /// + /// The full path and filename to save this annotation schema to. + public void Save(string fileName) + { + StreamWriter jsonFile = null; + try + { + jsonFile = File.CreateText(fileName); + using var jsonWriter = new JsonTextWriter(jsonFile); + JsonSerializer.Create(JsonSerializerSettings).Serialize(jsonWriter, this); + } + finally + { + jsonFile?.Dispose(); + } + } + + private static AnnotationSchema Load(StreamReader streamReader) + { + var reader = new JsonTextReader(streamReader); + var serializer = JsonSerializer.Create(JsonSerializerSettings); + var annotationSchema = serializer.Deserialize(reader); + + // Perform simple deserialization checks + if (string.IsNullOrEmpty(annotationSchema.Name)) + { + throw new Exception("Deserialized annotation schema has empty name."); + } + else if (annotationSchema.AttributeSchemas.Count == 0) + { + throw new Exception("Deserialized annotation schema has no attributes."); + } + + return annotationSchema; + } + } +} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchemaDefinition.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchemaDefinition.cs deleted file mode 100644 index 56b9c68b0..000000000 --- a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchemaDefinition.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. - -namespace Microsoft.Psi.Data.Annotations -{ - /// - /// Represents an annotation schema definition. - /// - public class AnnotationSchemaDefinition - { - /// - /// Initializes a new instance of the class. - /// - /// The name of the annotation schema definition. - /// The schema of the annotation schema definition. - public AnnotationSchemaDefinition(string name, IAnnotationSchema schema) - { - this.Name = name; - this.Schema = schema; - } - - /// - /// Gets or sets the name of the schema defintion. - /// - public string Name { get; set; } - - /// - /// Gets or sets the annotation schema. 
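// Usage sketch: building, persisting, and reloading an annotation schema with the new API.
// The AnnotationAttributeSchema constructor is assumed here to be (name, description, valueSchema);
// that type lives in AnnotationAttributeSchema.cs and its exact shape is not shown in this hunk.
// StringAnnotationValueSchema is defined later in this patch; schema and file names are illustrative.
using System;
using System.Drawing;
using Microsoft.Psi;
using Microsoft.Psi.Data.Annotations;

var schema = new AnnotationSchema("Speech");
schema.AddAttributeSchema(new AnnotationAttributeSchema(
    "Transcript",
    "The transcribed text.",
    new StringAnnotationValueSchema(string.Empty, Color.DarkGray, Color.White)));

// Create a default-valued annotation on a named track, then save and reload the schema.
var now = DateTime.UtcNow;
var annotation = schema.CreateDefaultTimeIntervalAnnotation(
    new TimeInterval(now - TimeSpan.FromSeconds(1), now), track: "Speaker1");

schema.Save("Speech.schema.json");
var reloaded = AnnotationSchema.LoadOrDefault("Speech.schema.json");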
- /// - public IAnnotationSchema Schema { get; set; } - } -} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchemaValueMetadata.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchemaValueMetadata.cs deleted file mode 100644 index 5562873cb..000000000 --- a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchemaValueMetadata.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. - -namespace Microsoft.Psi.Data.Annotations -{ - using System.Drawing; - - /// - /// Represents the metadata associated with a schema value. - /// - public class AnnotationSchemaValueMetadata - { - /// - /// Gets or sets the color for drawing the annotation area's border. - /// - public Color BorderColor { get; set; } - - /// - /// Gets or sets the color for drawing the annotation area's interior. - /// - public Color FillColor { get; set; } - - /// - /// Gets or sets the color for drawing the annotation's text. - /// - public Color TextColor { get; set; } - - /// - /// Gets or sets the width of the annotation's border. - /// - public double BorderWidth { get; set; } - - /// - /// Gets or sets a description of the value. - /// - public string Description { get; set; } - } -} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchema{T}.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchema{T}.cs deleted file mode 100644 index f43a3f77f..000000000 --- a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationSchema{T}.cs +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. - -namespace Microsoft.Psi.Data.Annotations -{ - using System.Drawing; - - /// - /// Represents an annotation schema. - /// - /// The datatype of the values contained in the schema. - public class AnnotationSchema : IAnnotationSchema - { - /// - /// Initializes a new instance of the class. - /// - /// The name of the finite annotation schema. - /// The default value for new instances of the schema. - /// The metadata to use for all values in the schema unless overridden by a specific value's metadata. - public AnnotationSchema(string name, T defaultValue, AnnotationSchemaValueMetadata metadata = null) - { - this.Name = name; - this.DefaultValue = defaultValue; - this.Metadata = metadata ?? CreateDefaultMetadata(); - } - - /// - public virtual string Name { get; set; } - - /// - /// Gets or sets the default value for this annotation schema. - /// - public virtual T DefaultValue { get; set; } - - /// - /// Gets or sets the metadata associated with the finite annotation schema value. - /// - public AnnotationSchemaValueMetadata Metadata { get; set; } - - /// - public virtual bool IsFiniteAnnotationSchema => false; - - /// - /// Gets the schema metadata for a given schema value. - /// - /// The value for which to retrieve the schema value metadata. - /// The metadata for the schema value, or null if the valule is not valid. - public virtual AnnotationSchemaValueMetadata GetMetadata(T value) - { - return this.Metadata; - } - - /// - /// Determines whether a value is a valid schema value. - /// - /// The value to validate. - /// True if the value is a valid schema value, otherwise false. 
- public virtual bool IsValid(T value) - { - return true; - } - - private static AnnotationSchemaValueMetadata CreateDefaultMetadata() - { - return new AnnotationSchemaValueMetadata() - { - BorderColor = Color.LightGray, - BorderWidth = 1, - Description = null, - FillColor = Color.DarkGray, - TextColor = Color.White, - }; - } - } -} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationValueSchema{T}.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationValueSchema{T}.cs new file mode 100644 index 000000000..e9949d773 --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationValueSchema{T}.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data.Annotations +{ + using System.Drawing; + + /// + /// Represents an annotation value schema. + /// + /// The datatype of the values contained in the schema. + public abstract class AnnotationValueSchema : IAnnotationValueSchema + { + /// + /// Initializes a new instance of the class. + /// + /// The default value for new instances of the schema. + /// The fill color. + /// The text color. + public AnnotationValueSchema(T defaultValue, Color fillColor, Color textColor) + { + this.DefaultValue = defaultValue; + this.FillColor = fillColor; + this.TextColor = textColor; + } + + /// + /// Gets the default value for this schema. + /// + public T DefaultValue { get; } + + /// + /// Gets the fill color. + /// + public Color FillColor { get; } + + /// + /// Gets the text color. + /// + public Color TextColor { get; } + + /// + public IAnnotationValue GetDefaultAnnotationValue() => new AnnotationValue(this.DefaultValue, this.FillColor, this.TextColor); + + /// + public IAnnotationValue CreateAnnotationValue(string value) => new AnnotationValue(this.CreateValue(value), this.FillColor, this.TextColor); + + /// + public bool IsValid(IAnnotationValue annotationValue) + => annotationValue.FillColor.Equals(this.FillColor) && annotationValue.TextColor.Equals(this.TextColor); + + /// + /// Creates a value from the specified string. + /// + /// The value specified as a string. + /// The value. + public abstract T CreateValue(string value); + } +} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationValue{T}.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationValue{T}.cs new file mode 100644 index 000000000..e1c472fea --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/AnnotationValue{T}.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data.Annotations +{ + using System.Drawing; + + /// + /// Represents an annotation value of a specific type. + /// + /// The datatype of the value. + public class AnnotationValue : IAnnotationValue + { + /// + /// Initializes a new instance of the class. + /// + /// The value of the schema value. + /// The value of the fill color. + /// The value of the text color. + public AnnotationValue(T value, Color fillColor, Color textColor) + { + this.Value = value; + this.FillColor = fillColor; + this.TextColor = textColor; + } + + /// + /// Gets or sets the schema value. + /// + public T Value { get; set; } + + /// + public string ValueAsString => this.Value?.ToString(); + + /// + /// Gets the color for drawing the annotation attribute area's interior. 
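// Usage sketch: a custom value schema built on the abstract AnnotationValueSchema<T> base above;
// only CreateValue needs to be supplied. The class name is illustrative.
using System.Drawing;
using Microsoft.Psi.Data.Annotations;

public class IntAnnotationValueSchema : AnnotationValueSchema<int>
{
    public IntAnnotationValueSchema(int defaultValue, Color fillColor, Color textColor)
        : base(defaultValue, fillColor, textColor)
    {
    }

    // Parses the string representation produced by AnnotationValue<int>.ValueAsString.
    public override int CreateValue(string value) => int.Parse(value);
}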
+ /// + public Color FillColor { get; } + + /// + /// Gets the color for drawing the annotation attribute value's text. + /// + public Color TextColor { get; } + } +} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/EnumerableAnnotationValueSchema{T}.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/EnumerableAnnotationValueSchema{T}.cs new file mode 100644 index 000000000..70fde6ee8 --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/EnumerableAnnotationValueSchema{T}.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data.Annotations +{ + using System; + using System.Collections.Generic; + using System.Linq; + + /// + /// Represents an enumerable annotation value schema. + /// + /// The datatype of the values in the schema. + public class EnumerableAnnotationValueSchema : IEnumerableAnnotationValueSchema + { + /// + /// Initializes a new instance of the class. + /// + /// The set of possible annotation values for the schema. + /// The default value for new instances of the schema. + public EnumerableAnnotationValueSchema(List> possibleValues, T defaultValue) + { + this.PossibleValues = possibleValues; + this.DefaultValue = defaultValue; + } + + /// + /// Gets the default value for this schema. + /// + public T DefaultValue { get; } + + /// + /// Gets or sets the set of possible annotation values. + /// + public List> PossibleValues { get; set; } + + /// + public IAnnotationValue CreateAnnotationValue(string value) + { + var annotationValue = this.PossibleValues.FirstOrDefault(v => v.Value.ToString() == value); + if (annotationValue == null) + { + throw new Exception("Cannot convert specified string into a valid annotation value."); + } + else + { + return annotationValue; + } + } + + /// + public IAnnotationValue GetDefaultAnnotationValue() => this.PossibleValues.First(v => v.Value.Equals(this.DefaultValue)); + + /// + public bool IsValid(IAnnotationValue annotationValue) => this.PossibleValues.Any(v => v.Equals(annotationValue)); + + /// + public IEnumerable GetPossibleAnnotationValues() + => this.PossibleValues; + } +} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/FiniteAnnotationSchemaValue{T}.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/FiniteAnnotationSchemaValue{T}.cs deleted file mode 100644 index 5c489e356..000000000 --- a/Sources/Data/Microsoft.Psi.Data/Annotations/FiniteAnnotationSchemaValue{T}.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. - -namespace Microsoft.Psi.Data.Annotations -{ - /// - /// Represents a value in a finite annotation schema. - /// - /// The datatype of the value. - public class FiniteAnnotationSchemaValue - { - /// - /// Initializes a new instance of the class. - /// - /// The value of the schema value. - /// The metadata for the value, or null if the schema's metadata should be used. - public FiniteAnnotationSchemaValue(T value, AnnotationSchemaValueMetadata metadata = null) - { - this.Value = value; - this.Metadata = metadata; - } - - /// - /// Gets or sets the schema value. - /// - public T Value { get; set; } - - /// - /// Gets or sets the metadata associated with the finite annotation schema value. 
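// Usage sketch: an enumerable (finite) value schema, the replacement for the deleted
// FiniteAnnotationSchema<T>; each possible value carries its own fill and text color.
// The value names and colors are illustrative.
using System.Collections.Generic;
using System.Drawing;
using Microsoft.Psi.Data.Annotations;

var activitySchema = new EnumerableAnnotationValueSchema<string>(
    new List<AnnotationValue<string>>
    {
        new AnnotationValue<string>("Voiced", Color.DarkGreen, Color.White),
        new AnnotationValue<string>("Unvoiced", Color.DarkRed, Color.White),
    },
    "Unvoiced");

// Strings map back onto one of the possible values; unknown strings throw.
var voiced = activitySchema.CreateAnnotationValue("Voiced");
var possible = activitySchema.GetPossibleAnnotationValues();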
- /// - public AnnotationSchemaValueMetadata Metadata { get; set; } - } -} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/FiniteAnnotationSchema{T}.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/FiniteAnnotationSchema{T}.cs deleted file mode 100644 index fa952bb1a..000000000 --- a/Sources/Data/Microsoft.Psi.Data/Annotations/FiniteAnnotationSchema{T}.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. - -namespace Microsoft.Psi.Data.Annotations -{ - using System.Collections.Generic; - using System.Linq; - using System.Runtime.Serialization; - - /// - /// Represents a finite annotation schema. - /// - /// The datatype of the values in the schema. - public class FiniteAnnotationSchema : AnnotationSchema - { - /// - /// Initializes a new instance of the class. - /// - /// The name of the finite annotation schema. - /// The collection of values for the schema. - /// The default value for new instances of the schema. - public FiniteAnnotationSchema(string name, List> schemaValues, T defaultValue) - : base(name, defaultValue) - { - this.SchemaValues = schemaValues; - } - - /// - public override bool IsFiniteAnnotationSchema => true; - - /// - /// Gets or sets the list of schema values in the schema. - /// - public List> SchemaValues { get; set; } - - /// - /// Gets the collection of valid values for the finite annotation. - /// - [IgnoreDataMember] - public IEnumerable Values => this.SchemaValues.Select(v => v.Value); - - /// - public override bool IsValid(T value) - { - return this.SchemaValues.Any(v => v.Value.Equals(value)); - } - - /// - public override AnnotationSchemaValueMetadata GetMetadata(T value) - { - FiniteAnnotationSchemaValue schemaValue = this.SchemaValues.FirstOrDefault(v => v.Value.Equals(value)); - return schemaValue != null && schemaValue.Metadata != null ? schemaValue.Metadata : this.Metadata; - } - - /// - /// Sets the metadata for a given annotation schema value. - /// - /// The schema value for which to set the metadata. - /// The metadata to associate with the value. - public void SetMetadata(T value, AnnotationSchemaValueMetadata metadata) - { - FiniteAnnotationSchemaValue schemaValue = this.SchemaValues.First(v => v.Value.Equals(value)); - schemaValue.Metadata = metadata; - } - } -} \ No newline at end of file diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationSchema.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationSchema.cs deleted file mode 100644 index 76ddb21c5..000000000 --- a/Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationSchema.cs +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. - -namespace Microsoft.Psi.Data.Annotations -{ - /// - /// Represents an annotation schema. - /// - public interface IAnnotationSchema - { - /// - /// Gets the name of the schema. - /// - string Name { get; } - - /// - /// Gets a value indicating whether the annotation schema is a finite annotation schema. - /// - bool IsFiniteAnnotationSchema { get; } - } -} diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationValue.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationValue.cs new file mode 100644 index 000000000..2b0b7102f --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationValue.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
+ +namespace Microsoft.Psi.Data.Annotations +{ + using System.Drawing; + + /// + /// Defines an annotation value in an untyped fashion. + /// + public interface IAnnotationValue + { + /// + /// Gets a string representation of the annotation value. + /// + public string ValueAsString { get; } + + /// + /// Gets the color for drawing the annotation value area's interior. + /// + public Color FillColor { get; } + + /// + /// Gets the color for drawing the annotation value text. + /// + public Color TextColor { get; } + } +} diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationValueSchema.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationValueSchema.cs new file mode 100644 index 000000000..4036a55b4 --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/IAnnotationValueSchema.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data.Annotations +{ + /// + /// Defines an annotation value schema. + /// + public interface IAnnotationValueSchema + { + /// + /// Gets the default value for this annotation value schema. + /// + /// The default value. + public IAnnotationValue GetDefaultAnnotationValue(); + + /// + /// Creates an annotation value from a specified string. + /// + /// The annotation value string. + /// The annotation value. + public IAnnotationValue CreateAnnotationValue(string value); + + /// + /// Gets a value indicating whether a specified annotation value is valid. + /// + /// The annotation value. + /// True if the specified annotation value is valid, otherwise false. + public bool IsValid(IAnnotationValue annotationValue); + } +} diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/IEnumerableAnnotationValueSchema.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/IEnumerableAnnotationValueSchema.cs new file mode 100644 index 000000000..58539ca74 --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/IEnumerableAnnotationValueSchema.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data.Annotations +{ + using System.Collections.Generic; + + /// + /// Defines an enumerable annotation value schema, i.e., a value schema with + /// a fixed set of possible values. + /// + public interface IEnumerableAnnotationValueSchema : IAnnotationValueSchema + { + /// + /// Gets the set of possible values for this annotation value schema. + /// + /// The set of possible values. + public IEnumerable GetPossibleAnnotationValues(); + } +} diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/Operators.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/Operators.cs new file mode 100644 index 000000000..a78ddd03a --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/Operators.cs @@ -0,0 +1,156 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data.Annotations +{ + using System; + using System.Collections.Generic; + using System.Linq; + using Microsoft.Psi.Components; + + /// + /// Implements stream operators for manipulating annotations. + /// + public static class Operators + { + /// + /// Converts a stream of dictionaries with boolean values into a corresponding stream of time interval annotations. + /// + /// The type of key in the source stream. + /// The source stream. + /// A function that, given a key, produces a track name and set of attribute values for the annotation. 
+ /// An optional delivery policy. + /// A time interval annotation stream. + public static IProducer ToTimeIntervalAnnotations( + this IProducer> source, + Func AttributeValues)> annotationConstructor, + DeliveryPolicy> deliveryPolicy = null) + => source.ToTimeIntervalAnnotations( + dict => dict.Where(kvp => kvp.Value).ToDictionary(kvp => kvp.Key, kvp => kvp.Value), + (k, _) => annotationConstructor(k), + deliveryPolicy); + + /// + /// Converts a stream of dictionaries into a corresponding stream of time interval annotations. + /// + /// The type of key in the source stream. + /// The type of values in the source stream. + /// The source stream. + /// A function that, given a key and value, produces a track name and set of attribute values for the annotation. + /// An optional delivery policy. + /// A time interval annotation stream. + public static IProducer ToTimeIntervalAnnotations( + this IProducer> source, + Func AttributeValues)> annotationConstructor, + DeliveryPolicy> deliveryPolicy = null) + => source.ToTimeIntervalAnnotations( + _ => _, + annotationConstructor, + deliveryPolicy); + + /// + /// Converts a stream into a corresponding stream of time interval annotations. + /// + /// The type of the input. + /// The type of key in the source stream. + /// The type of values in the source stream. + /// The source stream. + /// A function that, given an input message produces a dictionary of key-values that generates the annotation set. + /// A function that, given a key and value, produces a track name and set of attribute values for the annotation. + /// An optional delivery policy. + /// A time interval annotation stream. + private static IProducer ToTimeIntervalAnnotations( + this IProducer source, + Func> selector, + Func AttributeValues)> annotationConstructor, + DeliveryPolicy deliveryPolicy = null) + { + var intervals = new Dictionary(); + + var processor = new Processor( + source.Out.Pipeline, + (input, e, emitter) => + { + var timeIntervalAnnotationSet = default(TimeIntervalAnnotationSet); + + var dictionary = selector(input); + + // Add incoming objects to state + foreach (var key in dictionary.Keys) + { + if (intervals.ContainsKey(key)) + { + intervals[key] = (new TimeInterval(intervals[key].TimeInterval.Left, e.OriginatingTime), Serializer.DeepClone(dictionary[key])); + } + else + { + intervals.Add(key, (new TimeInterval(e.OriginatingTime, e.OriginatingTime), Serializer.DeepClone(dictionary[key]))); + } + } + + // For all ids no longer in the incoming message + var removeKeys = new List(); + foreach (var key in intervals.Keys) + { + if (!dictionary.ContainsKey(key)) + { + // In this case we need to post the object + removeKeys.Add(key); + (var annotationTrack, var attributeValues) = annotationConstructor(key, intervals[key].Value); + var annotation = new TimeIntervalAnnotation(intervals[key].TimeInterval, annotationTrack, attributeValues); + if (timeIntervalAnnotationSet == null) + { + timeIntervalAnnotationSet = new TimeIntervalAnnotationSet(annotation); + } + else + { + timeIntervalAnnotationSet.AddAnnotation(annotation); + } + } + } + + foreach (var id in removeKeys) + { + intervals.Remove(id); + } + + if (timeIntervalAnnotationSet != null) + { + emitter.Post(timeIntervalAnnotationSet, timeIntervalAnnotationSet.EndTime); + } + }, + (closingTime, emitter) => + { + // If we have any open interval + if (intervals.Any()) + { + var timeIntervalAnnotationSet = default(TimeIntervalAnnotationSet); + + // For each open interval + foreach (var key in intervals.Keys) + { + // 
Edit the end time to be the closing time + var newInterval = new TimeInterval(intervals[key].TimeInterval.Left, closingTime); + + // Append to the annotation set + (var annotationTrack, var attributeValues) = annotationConstructor(key, intervals[key].Value); + var annotation = new TimeIntervalAnnotation(newInterval, annotationTrack, attributeValues); + if (timeIntervalAnnotationSet == null) + { + timeIntervalAnnotationSet = new TimeIntervalAnnotationSet(annotation); + } + else + { + timeIntervalAnnotationSet.AddAnnotation(annotation); + } + } + + // Post the value + emitter.Post(timeIntervalAnnotationSet, closingTime); + } + }); + + return source.PipeTo(processor, deliveryPolicy); + } + } +} diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/StringAnnotationValueSchema.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/StringAnnotationValueSchema.cs new file mode 100644 index 000000000..c0404884d --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/StringAnnotationValueSchema.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data.Annotations +{ + using System.Drawing; + + /// + /// Represents a string annotation value schema. + /// + public class StringAnnotationValueSchema : AnnotationValueSchema + { + /// + /// Initializes a new instance of the class. + /// + /// The default value for new instances of the schema. + /// The fill color. + /// The text color. + public StringAnnotationValueSchema(string defaultValue, Color fillColor, Color textColor) + : base(defaultValue, fillColor, textColor) + { + } + + /// + public override string CreateValue(string value) => value; + } +} diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/TimeIntervalAnnotation.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/TimeIntervalAnnotation.cs index 5082f00e6..a7d37c293 100644 --- a/Sources/Data/Microsoft.Psi.Data/Annotations/TimeIntervalAnnotation.cs +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/TimeIntervalAnnotation.cs @@ -6,24 +6,36 @@ namespace Microsoft.Psi.Data.Annotations using System.Collections.Generic; /// - /// Represents an instance of a time interval annotation. + /// Represents a time interval annotation. /// - public class TimeIntervalAnnotation : Annotation + public class TimeIntervalAnnotation { /// /// Initializes a new instance of the class. /// /// The interval over which the annotation occurs. - /// The list of values for the annotation. - public TimeIntervalAnnotation(TimeInterval interval, Dictionary values) + /// The name of the annotation track. + /// The set of attribute values for the annotation. + public TimeIntervalAnnotation(TimeInterval interval, string track, Dictionary attributeValues) { this.Interval = interval; - this.Values = values; + this.Track = track; + this.AttributeValues = attributeValues; } /// /// Gets or sets the interval over which this annotation occurs. /// public TimeInterval Interval { get; set; } + + /// + /// Gets or sets the track of the this annotation. + /// + public string Track { get; set; } + + /// + /// Gets or sets the collection of values in the annotation. 
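// Usage sketch: converting a stream of per-speaker voice-activity flags into time interval
// annotations, one track per key, via the ToTimeIntervalAnnotations operator above. The
// operator's generic parameters are inferred (the angle-bracketed parts of the declarations
// were lost in this listing); the attribute name and value schema are illustrative.
using System.Collections.Generic;
using System.Drawing;
using Microsoft.Psi;
using Microsoft.Psi.Data.Annotations;

public static class AnnotationExamples
{
    public static IProducer<TimeIntervalAnnotationSet> ToSpeechAnnotations(
        IProducer<Dictionary<string, bool>> voiceActivity)
    {
        var speechValueSchema = new StringAnnotationValueSchema("Speech", Color.DarkGreen, Color.White);

        // Each key becomes a track; an annotation is emitted when the key's flag goes false
        // (or when the stream closes), covering the interval over which the flag was true.
        return voiceActivity.ToTimeIntervalAnnotations(
            speaker => (speaker, new Dictionary<string, IAnnotationValue>
            {
                { "Activity", speechValueSchema.CreateAnnotationValue("Speech") },
            }));
    }
}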
+ /// + public Dictionary AttributeValues { get; set; } } } diff --git a/Sources/Data/Microsoft.Psi.Data/Annotations/TimeIntervalAnnotationSet.cs b/Sources/Data/Microsoft.Psi.Data/Annotations/TimeIntervalAnnotationSet.cs new file mode 100644 index 000000000..d3159dbed --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/Annotations/TimeIntervalAnnotationSet.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data.Annotations +{ + using System; + using System.Collections.Generic; + using System.Linq; + + /// + /// Represents a set of overlapping time-interval annotations that belong to separate tracks but end at the same time. + /// + /// + /// This data structure provides the basis for persisting overlapping time interval annotations + /// in \psi streams. It captures a set of overlapping time interval annotations that are on + /// different tracks but end at the same time, captured by . + /// When persisted to a stream, the originating time of the + /// should correspond to the . + /// + public class TimeIntervalAnnotationSet + { + private readonly Dictionary data = new (); + + /// + /// Initializes a new instance of the class. + /// + /// The time interval annotation. + public TimeIntervalAnnotationSet(TimeIntervalAnnotation timeIntervalAnnotation) + { + this.data.Add(timeIntervalAnnotation.Track, timeIntervalAnnotation); + } + + /// + /// Gets the end time for the annotation set. + /// + public DateTime EndTime => this.data.Values.First().Interval.Right; + + /// + /// Gets the set of tracks spanned by these time interval annotations. + /// + public IEnumerable Tracks => this.data.Keys; + + /// + /// Gets the time interval annotation for a specified track name. + /// + /// The track name. + /// The corresponding time interval annotation. + public TimeIntervalAnnotation this[string track] => this.data[track]; + + /// + /// Adds a specified time interval annotation. + /// + /// The time interval annotation to add. + public void AddAnnotation(TimeIntervalAnnotation timeIntervalAnnotation) + { + if (timeIntervalAnnotation.Interval.Right != this.EndTime) + { + throw new ArgumentException("Cannot add a time interval annotation with a different end time to a time interval annotation set."); + } + + this.data.Add(timeIntervalAnnotation.Track, timeIntervalAnnotation); + } + + /// + /// Removes an annotation specified by a track name. + /// + /// The track name for the annotation to remove. + public void RemoveAnnotation(string track) + { + if (this.data.Count() == 1) + { + throw new InvalidOperationException("Cannot remove the last time interval annotation from a time interval annotation set."); + } + + this.data.Remove(track); + } + + /// + /// Gets a value indicating whether the annotation set contains an annotation for the specified track. + /// + /// The track name. + /// True if the annotation set contains an annotation for the specified track, otherwise false. + public bool ContainsTrack(string track) => this.data.ContainsKey(track); + } +} diff --git a/Sources/Data/Microsoft.Psi.Data/BatchProcessingTask.cs b/Sources/Data/Microsoft.Psi.Data/BatchProcessingTask.cs new file mode 100644 index 000000000..ee19f89e0 --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/BatchProcessingTask.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data +{ + /// + /// Defines an abstract base class for batch processing tasks. 
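// Usage sketch: the invariant enforced by TimeIntervalAnnotationSet -- every annotation in a
// set ends at the same time, each on its own track. Track names are illustrative.
using System;
using System.Collections.Generic;
using Microsoft.Psi;
using Microsoft.Psi.Data.Annotations;

var end = DateTime.UtcNow;
var set = new TimeIntervalAnnotationSet(
    new TimeIntervalAnnotation(
        new TimeInterval(end - TimeSpan.FromSeconds(2), end), "Speaker1", new Dictionary<string, IAnnotationValue>()));

// Allowed: same end time, different track.
set.AddAnnotation(
    new TimeIntervalAnnotation(
        new TimeInterval(end - TimeSpan.FromSeconds(1), end), "Speaker2", new Dictionary<string, IAnnotationValue>()));

// Adding an annotation with a different end time throws ArgumentException, and removing the
// last remaining annotation throws InvalidOperationException.
var hasSpeaker2 = set.ContainsTrack("Speaker2");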
+ /// + /// The type of the batch processing task configuration object. + /// + /// To implement a batch processing task, implement a derived class from this base abstract. + /// + public abstract class BatchProcessingTask : IBatchProcessingTask + where TBatchProcessingTaskConfiguration : BatchProcessingTaskConfiguration, new() + { + /// + /// Gets the default configuration for the batch procesing task. + /// + /// The default configuration. + public virtual TBatchProcessingTaskConfiguration GetDefaultConfiguration() => new (); + + /// + /// Runs a batch processing task. + /// + /// The pipeline used to run the task. + /// The session importer. + /// The exporter to write resulting streams to. + /// The configuration for the batch processing task. + public abstract void Run(Pipeline pipeline, SessionImporter sessionImporter, Exporter exporter, TBatchProcessingTaskConfiguration configuration); + + /// + BatchProcessingTaskConfiguration IBatchProcessingTask.GetDefaultConfiguration() => this.GetDefaultConfiguration(); + + /// + void IBatchProcessingTask.Run(Pipeline pipeline, SessionImporter sessionImporter, Exporter exporter, BatchProcessingTaskConfiguration configuration) => + this.Run(pipeline, sessionImporter, exporter, configuration as TBatchProcessingTaskConfiguration); + } +} diff --git a/Sources/Runtime/Microsoft.Psi/Common/BatchProcessingTaskAttribute.cs b/Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskAttribute.cs similarity index 91% rename from Sources/Runtime/Microsoft.Psi/Common/BatchProcessingTaskAttribute.cs rename to Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskAttribute.cs index 178725e02..9238e69fa 100644 --- a/Sources/Runtime/Microsoft.Psi/Common/BatchProcessingTaskAttribute.cs +++ b/Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskAttribute.cs @@ -1,14 +1,14 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT license. -namespace Microsoft.Psi +namespace Microsoft.Psi.Data { using System; /// /// Represents a batch processing task attribute. /// - [AttributeUsage(AttributeTargets.Method, AllowMultiple = false)] + [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = false)] public class BatchProcessingTaskAttribute : Attribute { /// @@ -26,12 +26,12 @@ public BatchProcessingTaskAttribute(string name) } /// - /// Gets the name. + /// Gets the task name. /// public string Name { get; } /// - /// Gets or sets the description. + /// Gets or sets the task description. /// public string Description { get; set; } = null; diff --git a/Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskConfiguration.cs b/Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskConfiguration.cs new file mode 100644 index 000000000..25030aed1 --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskConfiguration.cs @@ -0,0 +1,117 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data +{ + using System.ComponentModel; + using System.Runtime.Serialization; + + /// + /// Represents a configuration for a batch processing task. + /// + public class BatchProcessingTaskConfiguration : ObservableObject + { + private bool replayAllRealTime = false; + private bool deliveryPolicyLatestMessage = false; + private bool enableDiagnostics = false; + private string outputStoreName = null; + private string outputStorePath = null; + private string outputPartitionName = null; + + /// + /// Initializes a new instance of the class. 
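// Usage sketch: a method-based batch processing task. The attribute now lives in
// Microsoft.Psi.Data, and BatchProcessingTaskMetadata (below) invokes the static method with
// (pipeline, sessionImporter, exporter). The "Audio" stream name is illustrative, and the
// Write(name, exporter) call is assumed to be the usual Microsoft.Psi.Data persistence extension.
using Microsoft.Psi;
using Microsoft.Psi.Audio;
using Microsoft.Psi.Data;

public static class ExampleBatchTasks
{
    [BatchProcessingTask("Copy Audio", Description = "Copies the audio stream to the derived partition.")]
    public static void CopyAudio(Pipeline pipeline, SessionImporter sessionImporter, Exporter exporter)
    {
        if (sessionImporter.Contains("Audio"))
        {
            sessionImporter.OpenStream<AudioBuffer>("Audio").Write("Audio", exporter);
        }
    }
}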
+ /// + public BatchProcessingTaskConfiguration() + { + } + + /// + /// Gets the name of the configuration. + /// + [Browsable(false)] + public string Name => "Configuration"; + + /// + /// Gets or sets a value indicating whether to use the descriptor when executing this batch task. + /// + [DataMember] + [DisplayName("Replay in Real Time")] + [Description("Indicates whether the task will execute by performing replay in real time.")] + public bool ReplayAllRealTime + { + get => this.replayAllRealTime; + set { this.Set(nameof(this.ReplayAllRealTime), ref this.replayAllRealTime, value); } + } + + /// + /// Gets or sets a value indicating whether to use the pipeline-level delivery policy when executing this batch task. + /// + [DataMember] + [DisplayName("Use Latest Message Delivery Policy")] + [Description("Indicates whether the task will execute with a latest message global delivery policy.")] + public bool DeliveryPolicyLatestMessage + { + get => this.deliveryPolicyLatestMessage; + set { this.Set(nameof(this.DeliveryPolicyLatestMessage), ref this.deliveryPolicyLatestMessage, value); } + } + + /// + /// Gets or sets a value indicating whether to enable pipeline diagnostics when running this batch task. + /// + [DataMember] + [DisplayName("Enable diagnostics")] + [Description("Indicates whether diagnostics will be enabled on the pipeline while executing the task.")] + public bool EnableDiagnostics + { + get => this.enableDiagnostics; + set { this.Set(nameof(this.EnableDiagnostics), ref this.enableDiagnostics, value); } + } + + /// + /// Gets or sets the output store name. + /// + [DataMember] + [DisplayName("Output Store Name")] + [Description("The output store name.")] + public string OutputStoreName + { + get => this.outputStoreName; + set { this.Set(nameof(this.OutputStoreName), ref this.outputStoreName, value); } + } + + /// + /// Gets or sets the output store path. + /// + [DataMember] + [DisplayName("Output Store Path")] + [Description("The output store path.")] + public string OutputStorePath + { + get => this.outputStorePath; + set { this.Set(nameof(this.OutputStorePath), ref this.outputStorePath, value); } + } + + /// + /// Gets or sets the output partition name. + /// + [DataMember] + [DisplayName("Output Partition Name")] + [Description("The output partition name.")] + public string OutputPartitionName + { + get => this.outputPartitionName; + set { this.Set(nameof(this.OutputPartitionName), ref this.outputPartitionName, value); } + } + + /// + /// Validates the configuration. + /// + /// A message describing the issue if the configuration is invalid. + /// True if the configuration is valid, false otherwise. + public virtual bool Validate(out string error) + { + error = null; + return true; + } + } +} diff --git a/Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskMetadata.cs b/Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskMetadata.cs new file mode 100644 index 000000000..3cf0b2cfd --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskMetadata.cs @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data +{ + using System; + using System.Reflection; + + /// + /// Represents metadata about a dynamically loaded batch processing task + /// and provides functionality for configuring and executing the task. 
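// Usage sketch: a class-based batch processing task with its own configuration type, following
// the BatchProcessingTask<TConfiguration> base and the observable configuration pattern above.
// The task, configuration, and property names are illustrative; the Run body is left as a stub.
using Microsoft.Psi;
using Microsoft.Psi.Data;

public class ResampleAudioTaskConfiguration : BatchProcessingTaskConfiguration
{
    private int targetSamplingRate = 16000;

    public int TargetSamplingRate
    {
        get => this.targetSamplingRate;
        set { this.Set(nameof(this.TargetSamplingRate), ref this.targetSamplingRate, value); }
    }
}

[BatchProcessingTask("Resample Audio", Description = "Resamples the session audio to a target rate.")]
public class ResampleAudioTask : BatchProcessingTask<ResampleAudioTaskConfiguration>
{
    public override void Run(Pipeline pipeline, SessionImporter sessionImporter, Exporter exporter, ResampleAudioTaskConfiguration configuration)
    {
        // Open the input streams, build the processing fragment using configuration.TargetSamplingRate,
        // and write the derived streams to the exporter.
    }
}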
+ /// + public class BatchProcessingTaskMetadata + { + private readonly BatchProcessingTaskAttribute batchProcessingTaskAttribute = null; + private readonly Type batchProcessingTaskType; + private readonly MethodInfo batchProcessingTaskMethodInfo; + + /// + /// Initializes a new instance of the class. + /// + /// The batch processing task type. + /// The batch processing task attribute. + public BatchProcessingTaskMetadata(Type batchProcessingTaskType, BatchProcessingTaskAttribute batchProcessingTaskAttribute) + { + this.batchProcessingTaskType = batchProcessingTaskType; + this.batchProcessingTaskAttribute = batchProcessingTaskAttribute; + } + + /// + /// Initializes a new instance of the class. + /// + /// The batch processing method info. + /// The batch processing task attribute. + public BatchProcessingTaskMetadata(MethodInfo batchProcessingTaskMethodInfo, BatchProcessingTaskAttribute batchProcessingTaskAttribute) + { + this.batchProcessingTaskMethodInfo = batchProcessingTaskMethodInfo; + this.batchProcessingTaskAttribute = batchProcessingTaskAttribute; + } + + /// + /// Gets the batch processing task name. + /// + public string Name => this.batchProcessingTaskAttribute.Name; + + /// + /// Gets the batch processing task description. + /// + public string Description => this.batchProcessingTaskAttribute.Description; + + /// + /// Gets the batch processing task icon source path. + /// + public string IconSourcePath => this.batchProcessingTaskAttribute.IconSourcePath; + + /// + /// Gets a value indicating whether this batch processing task is method based. + /// + private bool IsMethodBased => this.batchProcessingTaskMethodInfo != null; + + /// + /// Gets the default configuration for the batch processing task. + /// + /// The default configuration. + public BatchProcessingTaskConfiguration GetDefaultConfiguration() + { + if (this.IsMethodBased) + { + return new BatchProcessingTaskConfiguration() + { + ReplayAllRealTime = this.batchProcessingTaskAttribute.ReplayAllRealTime, + DeliveryPolicyLatestMessage = this.batchProcessingTaskAttribute.DeliveryPolicyLatestMessage, + OutputStoreName = this.batchProcessingTaskAttribute.OutputStoreName, + OutputStorePath = this.batchProcessingTaskAttribute.OutputStorePath, + OutputPartitionName = this.batchProcessingTaskAttribute.OutputPartitionName ?? "Derived", + }; + } + else + { + var batchProcessingTask = Activator.CreateInstance(this.batchProcessingTaskType) as IBatchProcessingTask; + return batchProcessingTask.GetDefaultConfiguration(); + } + } + + /// + /// Runs the batch processing task. + /// + /// The pipeline to run the task on. + /// The session importer. + /// The exporter. + /// The task configuration. + public void Run(Pipeline pipeline, SessionImporter sessionImporter, Exporter exporter, BatchProcessingTaskConfiguration configuration) + { + if (this.IsMethodBased) + { + this.batchProcessingTaskMethodInfo.Invoke(null, new object[] { pipeline, sessionImporter, exporter }); + } + else + { + var batchProcessingTask = Activator.CreateInstance(this.batchProcessingTaskType) as IBatchProcessingTask; + batchProcessingTask.Run(pipeline, sessionImporter, exporter, configuration); + } + } + } +} diff --git a/Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskOperators.cs b/Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskOperators.cs new file mode 100644 index 000000000..94f6a12aa --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/BatchProcessingTaskOperators.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT license. + +namespace Microsoft.Psi.Data +{ + using System; + + /// + /// Provides static extension methods related to batch processing tasks. + /// + public static class BatchProcessingTaskOperators + { + /// + /// Checks if a specified type is a batch processing task type. + /// + /// The type to check. + /// True if the specified type is a batch processing task type, o/w false. + public static bool IsBatchProcessingTaskType(this Type type) + { + if (type == null) + { + return false; + } + else if (type.IsGenericType) + { + if (type.GetGenericTypeDefinition() == typeof(BatchProcessingTask<>)) + { + return true; + } + else + { + return type.BaseType.IsBatchProcessingTaskType(); + } + } + else + { + return type.BaseType.IsBatchProcessingTaskType(); + } + } + } +} diff --git a/Sources/Data/Microsoft.Psi.Data/IBatchProcessingTask.cs b/Sources/Data/Microsoft.Psi.Data/IBatchProcessingTask.cs new file mode 100644 index 000000000..d70407d31 --- /dev/null +++ b/Sources/Data/Microsoft.Psi.Data/IBatchProcessingTask.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Data +{ + /// + /// Defines a batch processing task. + /// + internal interface IBatchProcessingTask + { + /// + /// Gets the default configuration. + /// + /// The default configuration. + public BatchProcessingTaskConfiguration GetDefaultConfiguration(); + + /// + /// Runs the batch processing task with a specified configuration. + /// + /// The pipeline to run the task. + /// The session importer. + /// The exporter to use. + /// The configuration for the task. + public void Run(Pipeline pipeline, SessionImporter sessionImporter, Exporter exporter, BatchProcessingTaskConfiguration configuration); + } +} diff --git a/Sources/Data/Microsoft.Psi.Data/Json/JsonStore.cs b/Sources/Data/Microsoft.Psi.Data/Json/JsonStore.cs index 6b180285a..d19bdbb9b 100644 --- a/Sources/Data/Microsoft.Psi.Data/Json/JsonStore.cs +++ b/Sources/Data/Microsoft.Psi.Data/Json/JsonStore.cs @@ -11,7 +11,7 @@ public static class JsonStore /// /// Creates a new multi-stream JSON store and returns an instance which can be used to write streams to this store. /// - /// The that owns the . + /// The pipeline to add the component to. /// The name of the store to create. /// The path to use. If null, an in-memory store is created. /// Indicates whether to create a numbered subdirectory for each execution of the pipeline. @@ -24,7 +24,7 @@ public static JsonExporter Create(Pipeline pipeline, string name, string rootPat /// /// Opens a JSON store for read and returns an instance which can be used to inspect the store and open the streams. /// - /// The that owns the . + /// The pipeline to add the component to. /// The name of the application that generated the persisted files, or the root name of the files. /// The directory in which the main persisted file resides. /// A instance that can be used to open streams and read messages. 
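// Usage sketch: discovering batch processing tasks in a loaded assembly by pairing the
// IsBatchProcessingTaskType extension with the class-level attribute, then wrapping each hit
// in BatchProcessingTaskMetadata. The assembly choice is illustrative.
using System.Linq;
using System.Reflection;
using Microsoft.Psi.Data;

var batchTasks =
    from type in Assembly.GetExecutingAssembly().GetTypes()
    where type.IsBatchProcessingTaskType()
    let attribute = type.GetCustomAttribute<BatchProcessingTaskAttribute>()
    where attribute != null
    select new BatchProcessingTaskMetadata(type, attribute);

foreach (var task in batchTasks)
{
    var configuration = task.GetDefaultConfiguration();
    // ... edit the configuration, then call task.Run(pipeline, sessionImporter, exporter, configuration).
}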
diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Base/ObservableObject.cs b/Sources/Data/Microsoft.Psi.Data/ObservableObject.cs similarity index 98% rename from Sources/Visualization/Microsoft.Psi.Visualization.Windows/Base/ObservableObject.cs rename to Sources/Data/Microsoft.Psi.Data/ObservableObject.cs index 2c2aa3d9b..16288a953 100644 --- a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Base/ObservableObject.cs +++ b/Sources/Data/Microsoft.Psi.Data/ObservableObject.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT license. -namespace Microsoft.Psi.Visualization.Base +namespace Microsoft.Psi.Data { using System.ComponentModel; using System.Runtime.Serialization; diff --git a/Sources/Data/Microsoft.Psi.Data/SessionImporter.cs b/Sources/Data/Microsoft.Psi.Data/SessionImporter.cs index 796e4f291..9b897e556 100644 --- a/Sources/Data/Microsoft.Psi.Data/SessionImporter.cs +++ b/Sources/Data/Microsoft.Psi.Data/SessionImporter.cs @@ -12,7 +12,7 @@ namespace Microsoft.Psi.Data /// public class SessionImporter { - private Dictionary importers = new Dictionary(); + private readonly Dictionary importers = new (); private SessionImporter(Pipeline pipeline, Session session, bool usePerStreamReaders) { @@ -71,7 +71,7 @@ public static SessionImporter Open(Pipeline pipeline, Session session, bool useP /// /// The stream to search for. /// true if any importer contains the named stream; otherwise false. - public bool HasStream(string streamName) + public bool Contains(string streamName) { var all = this.importers.Values.Where(importer => importer.Contains(streamName)); var count = all.Count(); @@ -114,11 +114,11 @@ public IProducer OpenStream(string streamName, Func allocator = null, A } else if (count > 1) { - throw new System.Exception($"Underspecified access to session: multiple partitions contain stream {streamName}"); + throw new Exception($"Underspecified access to session: multiple partitions contain stream {streamName}"); } else { - throw new System.Exception($"Cannot find {streamName}"); + throw new Exception($"Cannot find {streamName}"); } } diff --git a/Sources/Data/Test.Psi.Data/Test.Psi.Data.csproj b/Sources/Data/Test.Psi.Data/Test.Psi.Data.csproj index 720f44fa4..ca9b405a1 100644 --- a/Sources/Data/Test.Psi.Data/Test.Psi.Data.csproj +++ b/Sources/Data/Test.Psi.Data/Test.Psi.Data.csproj @@ -35,7 +35,7 @@ all runtime; build; native; contentfiles; analyzers - + diff --git a/Sources/Imaging/Microsoft.Psi.Imaging.Linux/ImagingOperators.cs b/Sources/Imaging/Microsoft.Psi.Imaging.Linux/ImagingOperators.cs index ec6e0c60f..8eed4256e 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging.Linux/ImagingOperators.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging.Linux/ImagingOperators.cs @@ -85,7 +85,7 @@ internal static SKImage AsSKImage(this ImageBase image) _ => throw new ArgumentException($"Unsupported pixel format: {image.PixelFormat}"), }; var info = new SKImageInfo(image.Width, image.Height, colorType); - return SKImage.FromPixelData(info, data, image.Stride); + return SKImage.FromPixels(info, data, image.Stride); } } } \ No newline at end of file diff --git a/Sources/Imaging/Microsoft.Psi.Imaging.Linux/Microsoft.Psi.Imaging.Linux.csproj b/Sources/Imaging/Microsoft.Psi.Imaging.Linux/Microsoft.Psi.Imaging.Linux.csproj index 2e472ba83..d005311ed 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging.Linux/Microsoft.Psi.Imaging.Linux.csproj +++ 
b/Sources/Imaging/Microsoft.Psi.Imaging.Linux/Microsoft.Psi.Imaging.Linux.csproj @@ -34,7 +34,7 @@ runtime; build; native; contentfiles; analyzers - + diff --git a/Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImageFromStreamDecoder.cs b/Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImageFromStreamDecoder.cs index 38c5549c2..68d480efd 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImageFromStreamDecoder.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImageFromStreamDecoder.cs @@ -4,6 +4,7 @@ namespace Microsoft.Psi.Imaging { using System.IO; + using System.IO.Compression; using System.Windows; using System.Windows.Media.Imaging; @@ -15,18 +16,35 @@ public class ImageFromStreamDecoder : IImageFromStreamDecoder /// public void DecodeFromStream(Stream stream, Image image) { - var decoder = BitmapDecoder.Create(stream, BitmapCreateOptions.PreservePixelFormat, BitmapCacheOption.Default); - BitmapSource bitmapSource = decoder.Frames[0]; - var fmt = bitmapSource.Format.ToPixelFormat(); - if (fmt != image.PixelFormat) + // GZip indentified by 1f8b header (see section 2.3.1 of RFC 1952 https://www.ietf.org/rfc/rfc1952.txt) + if (stream.Length >= 2 && stream.ReadByte() == 0x1f && stream.ReadByte() == 0x8b) { - using var img = Microsoft.Psi.Imaging.ImagePool.GetOrCreate(image.Width, image.Height, fmt); - bitmapSource.CopyPixels(Int32Rect.Empty, img.Resource.ImageData, img.Resource.Stride * img.Resource.Height, img.Resource.Stride); - img.Resource.CopyTo(image); + // decode GZip + stream.Position = 0; // advanced by if (... stream.ReadByte() ...) above + var size = image.Stride * image.Height; + using var decompressor = new GZipStream(stream, CompressionMode.Decompress); + unsafe + { + decompressor.CopyTo(new UnmanagedMemoryStream((byte*)image.ImageData.ToPointer(), size, size, FileAccess.ReadWrite)); + } } else - { - bitmapSource.CopyPixels(Int32Rect.Empty, image.ImageData, image.Stride * image.Height, image.Stride); + { + // decode JPEG, PNG, ... + stream.Position = 0; // advanced by if (... stream.ReadByte() ...) above + var decoder = BitmapDecoder.Create(stream, BitmapCreateOptions.PreservePixelFormat, BitmapCacheOption.Default); + BitmapSource bitmapSource = decoder.Frames[0]; + var fmt = bitmapSource.Format.ToPixelFormat(); + if (fmt != image.PixelFormat) + { + using var img = Microsoft.Psi.Imaging.ImagePool.GetOrCreate(image.Width, image.Height, fmt); + bitmapSource.CopyPixels(Int32Rect.Empty, img.Resource.ImageData, img.Resource.Stride * img.Resource.Height, img.Resource.Stride); + img.Resource.CopyTo(image); + } + else + { + bitmapSource.CopyPixels(Int32Rect.Empty, image.ImageData, image.Stride * image.Height, image.Stride); + } } } diff --git a/Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImageToGZipStreamEncoder.cs b/Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImageToGZipStreamEncoder.cs new file mode 100644 index 000000000..c18546c8c --- /dev/null +++ b/Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImageToGZipStreamEncoder.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Imaging +{ + using System.IO; + using System.IO.Compression; + + /// + /// Implements an image encoder for GZip format. 
+ /// + public class ImageToGZipStreamEncoder : IImageToStreamEncoder + { + /// + public void EncodeToStream(Image image, Stream stream) + { + unsafe + { + var size = image.Stride * image.Height; + var imageData = new UnmanagedMemoryStream((byte*)image.ImageData.ToPointer(), size); + using var compressor = new GZipStream(stream, CompressionMode.Compress, true); + imageData.CopyTo(compressor); + } + } + } +} \ No newline at end of file diff --git a/Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImagingOperators.cs b/Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImagingOperators.cs index fe0004d80..895c1c3cf 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImagingOperators.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging.Windows/ImagingOperators.cs @@ -23,7 +23,7 @@ public static IProducer> EncodeJpeg(this IProducer /// Encodes an image to a PNG format. /// - /// A producer of images to encoder. + /// A producer of images to encode. /// An optional delivery policy. /// A producer that generates the PNG images. public static IProducer> EncodePng(this IProducer> source, DeliveryPolicy> deliveryPolicy = null) @@ -31,6 +31,17 @@ public static IProducer> EncodePng(this IProducer + /// Encodes an image to a GZIP format. + /// + /// A producer of images to encode. + /// An optional delivery policy. + /// A producer that generates the GZipped images. + public static IProducer> EncodeGZip(this IProducer> source, DeliveryPolicy> deliveryPolicy = null) + { + return source.Encode(new ImageToGZipStreamEncoder(), deliveryPolicy); + } + /// /// Decodes an encoded image. /// diff --git a/Sources/Imaging/Microsoft.Psi.Imaging.Windows/Microsoft.Psi.Imaging.Windows.csproj b/Sources/Imaging/Microsoft.Psi.Imaging.Windows/Microsoft.Psi.Imaging.Windows.csproj index 761a15724..43258eb31 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging.Windows/Microsoft.Psi.Imaging.Windows.csproj +++ b/Sources/Imaging/Microsoft.Psi.Imaging.Windows/Microsoft.Psi.Imaging.Windows.csproj @@ -10,12 +10,14 @@ ..\..\..\Build\Microsoft.Psi.ruleset true + true bin\Debug\net472\Microsoft.Psi.Imaging.Windows.xml ..\..\..\Build\Microsoft.Psi.ruleset true + true diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/DepthImage.cs b/Sources/Imaging/Microsoft.Psi.Imaging/DepthImage.cs index 319fed719..54b6288a5 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging/DepthImage.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging/DepthImage.cs @@ -4,9 +4,8 @@ namespace Microsoft.Psi.Imaging { using System; - using System.Diagnostics; using System.Drawing; - using System.Drawing.Imaging; + using System.Drawing.Imaging; using Microsoft.Psi.Common; using Microsoft.Psi.Serialization; @@ -287,6 +286,35 @@ public ushort GetPixel(int x, int y) } } + /// + /// Try to gets the value of a pixel in the depth image. + /// + /// Pixel's X coordinate. + /// Pixel's Y coordinate. + /// The output value of the pixel. + /// True if a pixel value is returned, otherwise false. + public bool TryGetPixel(int x, int y, out ushort value) + { + value = 0; + + if (x < 0 || x >= this.Width) + { + return false; + } + + if (y < 0 || y >= this.Height) + { + return false; + } + + unsafe + { + value = *(ushort*)((byte*)this.ImageData.ToPointer() + y * this.Stride + x * this.BitsPerPixel / 8); + } + + return true; + } + /// /// Gets the range of values in the depth image. 
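// Usage sketch: the new lossless GZip image path. EncodeGZip compresses the raw pixel buffer,
// and ImageFromStreamDecoder recognizes the 1f 8b header and decompresses it on the way back.
// The stream's generic types are inferred (angle brackets were lost in this listing), and the
// parameterless Decode() is assumed to be the default-decoder overload documented just below EncodeGZip.
using Microsoft.Psi;
using Microsoft.Psi.Imaging;

public static class GZipImagingExample
{
    public static IProducer<Shared<Image>> GZipRoundTrip(IProducer<Shared<Image>> video)
        => video.EncodeGZip().Decode();
}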
/// diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/DepthImageDecoder.cs b/Sources/Imaging/Microsoft.Psi.Imaging/DepthImageDecoder.cs index c3fe58daa..4ee88e202 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging/DepthImageDecoder.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging/DepthImageDecoder.cs @@ -16,7 +16,7 @@ public class DepthImageDecoder : ConsumerProducer, Sha /// /// Initializes a new instance of the class. /// - /// Pipeline to add this component to. + /// The pipeline to add the component to. /// The depth image decoder to use. public DepthImageDecoder(Pipeline pipeline, IDepthImageFromStreamDecoder decoder) : base(pipeline) diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/DepthImageEncoder.cs b/Sources/Imaging/Microsoft.Psi.Imaging/DepthImageEncoder.cs index 48f6229cc..e4693f614 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging/DepthImageEncoder.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging/DepthImageEncoder.cs @@ -16,7 +16,7 @@ public class DepthImageEncoder : ConsumerProducer, Shared /// Initializes a new instance of the class. /// - /// Pipeline to add this component to. + /// The pipeline to add the component to. /// The depth image encoder to use. public DepthImageEncoder(Pipeline pipeline, IDepthImageToStreamEncoder encoder) : base(pipeline) diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/EncodedDepthImage.cs b/Sources/Imaging/Microsoft.Psi.Imaging/EncodedDepthImage.cs index 2300a078f..672988d29 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging/EncodedDepthImage.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging/EncodedDepthImage.cs @@ -94,6 +94,18 @@ public byte[] GetBuffer() return this.stream.GetBuffer(); } + /// + /// Sets the depth image data from a byte array. + /// + /// Byte array containing the image data. + /// The offset in buffer at which to begin copying bytes. + /// The maximum number of bytes to copy. + public void SetBuffer(byte[] buffer, int offset, int count) + { + this.stream.Position = 0; + this.stream.Write(buffer, offset, count); + } + /// /// Encodes a specified depth image with a specified encoder into the current encoded image. /// diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/EncodedImage.cs b/Sources/Imaging/Microsoft.Psi.Imaging/EncodedImage.cs index f37620878..6a6d00829 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging/EncodedImage.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging/EncodedImage.cs @@ -54,6 +54,11 @@ public EncodedImage(int width, int height, PixelFormat pixelFormat) /// public PixelFormat PixelFormat => this.pixelFormat; + /// + /// Gets the size of the encoded image in bytes. + /// + public int Size => this.stream != null ? (int)this.stream.Length : 0; + /// /// Releases the image. /// @@ -88,6 +93,18 @@ public byte[] GetBuffer() return this.stream.GetBuffer(); } + /// + /// Sets the image data to byte array. + /// + /// Byte array containing the image data. + /// The offset in buffer at which to begin copying bytes. + /// The maximum number of bytes to copy. + public void SetBuffer(byte[] buffer, int offset, int count) + { + this.stream.SetLength(0); + this.stream.Write(buffer, offset, count); + } + /// /// Encodes a specified image with a specified encoder into the current encoded image. 
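// Usage sketch: round-tripping encoded image bytes with the new Size and SetBuffer members,
// useful when the encoded payload travels outside the store (file, socket, interop buffer).
// GetBuffer returns the underlying MemoryStream buffer, which may be longer than the payload,
// so Size gives the valid length.
using System;
using Microsoft.Psi.Imaging;

public static class EncodedImageBufferExample
{
    public static byte[] CopyEncodedBytes(EncodedImage encodedImage)
    {
        var bytes = new byte[encodedImage.Size];
        Array.Copy(encodedImage.GetBuffer(), bytes, encodedImage.Size);
        return bytes;
    }

    public static EncodedImage FromBytes(int width, int height, PixelFormat pixelFormat, byte[] payload)
    {
        var encodedImage = new EncodedImage(width, height, pixelFormat);
        encodedImage.SetBuffer(payload, 0, payload.Length);
        return encodedImage;
    }
}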
/// diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/Image.cs b/Sources/Imaging/Microsoft.Psi.Imaging/Image.cs index 2d7168a85..ed15fde1d 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging/Image.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging/Image.cs @@ -4,9 +4,9 @@ namespace Microsoft.Psi.Imaging { using System; - using System.Diagnostics; using System.Drawing; using System.Drawing.Imaging; + using System.IO; using Microsoft.Psi.Common; using Microsoft.Psi.Serialization; @@ -123,6 +123,46 @@ public static Image FromBitmap(Bitmap bitmap) return image; } + /// + /// Creates a new from the specified file. + /// + /// The name of the file from which to create the . + /// A new created from the specified file. + public static Image FromFile(string filename) + { + // Create the Bitmap using Image.FromStream instead of FromFile as FromFile does not release the file handle. + // Though the remarks in the doc https://docs.microsoft.com/en-us/dotnet/api/system.drawing.image.fromstream + // state that the stream must be kept open for the lifetime of the image, we effectively create a copy of the + // image data in the call to FromBitmap, so it is safe to dispose the FileStream upon exiting this method. + using var fileStream = new FileStream(filename, FileMode.Open, FileAccess.Read); + return FromBitmap((Bitmap)Bitmap.FromStream(fileStream)); + } + + /// + /// Saves this to the specified file. + /// + /// The name of the file to which to save the . + public void Save(string filename) + { + if (this.PixelFormat == PixelFormat.Gray_16bpp || this.PixelFormat == PixelFormat.RGBA_64bpp) + { + throw new NotSupportedException($"Saving {this.PixelFormat} images is not currently supported. Convert to a supported format such as 8bpp grayscale or 24/32bpp color prior to saving."); + } + + // There is no equivalent system pixel format for RGB_24bpp, so convert to BGR_24bpp then save + if (this.PixelFormat == PixelFormat.RGB_24bpp) + { + int stride = 4 * ((this.Width * 3 + 3) / 2); // Rounding to nearest word boundary + using var tmpImage = new Image(this.Width, this.Height, stride, PixelFormat.BGR_24bpp); + this.CopyTo(tmpImage); + tmpImage.Save(filename); + } + else + { + this.ToBitmap().Save(filename); + } + } + /// /// Copies the image contents from a specified source locked bitmap data. /// diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/ImageBase.cs b/Sources/Imaging/Microsoft.Psi.Imaging/ImageBase.cs index 121917863..4a4b9e194 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging/ImageBase.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging/ImageBase.cs @@ -332,6 +332,29 @@ public void CopyTo(IntPtr destination, int width, int height, int stride, PixelF }); } } + else if ((this.pixelFormat == PixelFormat.Gray_8bpp) && + (pixelFormat == PixelFormat.BGRA_32bpp)) + { + unsafe + { + byte* src = (byte*)this.image.Data.ToPointer(); + byte* dst = (byte*)destination.ToPointer(); + + Parallel.For(0, this.Height, i => + { + byte* srcCopy = src + (this.stride * i); + byte* dstCopy = dst + (stride * i); + for (int j = 0; j < this.width; j++) + { + // dest = (src << 24) | (src << 16) | (src << 8) | 0xff + *dstCopy++ = *srcCopy; + *dstCopy++ = *srcCopy; + *dstCopy++ = *srcCopy++; + *dstCopy++ = 0xff; // alpha + } + }); + } + } else { this.CopyImageSlow(this.image.Data, this.pixelFormat, destination, stride, pixelFormat); @@ -632,6 +655,9 @@ public abstract class CustomSerializer : ISerializer { private const int Version = 4; + /// + public bool? IsClearRequired => true; + /// /// Gets the type schema. 
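A short usage sketch for the Image.FromFile and Image.Save helpers added to Image.cs above. Illustrative only, not part of the diff; the method name and file paths are placeholders. Note that, per the implementation above, Save throws NotSupportedException for Gray_16bpp and RGBA_64bpp images, so the sketch converts to a supported format first.

    using Microsoft.Psi.Imaging;

    public static class ImageFileExample
    {
        // Hypothetical round trip: load an image from disk, convert it to 24bpp BGR, and save a copy.
        public static void CopyAsBgr24(string inputPath, string outputPath)
        {
            using var image = Image.FromFile(inputPath);
            using var bgr = image.Convert(PixelFormat.BGR_24bpp);
            bgr.Save(outputPath);
        }
    }
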
/// diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/ImageDecoder.cs b/Sources/Imaging/Microsoft.Psi.Imaging/ImageDecoder.cs index 11af2bcc9..5ba7bfa34 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging/ImageDecoder.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging/ImageDecoder.cs @@ -17,7 +17,7 @@ public class ImageDecoder : ConsumerProducer, Shared /// /// Initializes a new instance of the class. /// - /// Pipeline to add this component to. + /// The pipeline to add the component to. /// The image decoder to use. public ImageDecoder(Pipeline pipeline, IImageFromStreamDecoder decoder) : base(pipeline) diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/ImageEncoder.cs b/Sources/Imaging/Microsoft.Psi.Imaging/ImageEncoder.cs index c6e746302..1b97c3467 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging/ImageEncoder.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging/ImageEncoder.cs @@ -16,7 +16,7 @@ public class ImageEncoder : ConsumerProducer, Shared /// /// Initializes a new instance of the class. /// - /// Pipeline to add this component to. + /// The pipeline to add the component to. /// The image encoder to use. public ImageEncoder(Pipeline pipeline, IImageToStreamEncoder encoder) : base(pipeline) diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/ImageExtensions.cs b/Sources/Imaging/Microsoft.Psi.Imaging/ImageExtensions.cs index 142ee54ec..9d49fe298 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging/ImageExtensions.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging/ImageExtensions.cs @@ -6,6 +6,7 @@ namespace Microsoft.Psi.Imaging using System; using System.Drawing; using System.Drawing.Imaging; + using System.Runtime.CompilerServices; using System.Threading.Tasks; /// @@ -339,7 +340,7 @@ public static Image Scale(this Image image, float scaleX, float scaleY, Sampling { int scaledWidth = (int)Math.Abs(image.Width * scaleX); int scaledHeight = (int)Math.Abs(image.Height * scaleY); - Image destImage = new Image(scaledWidth, scaledHeight, image.PixelFormat); + var destImage = new Image(scaledWidth, scaledHeight, image.PixelFormat); image.Resize(destImage, scaledWidth, scaledHeight, mode); return destImage; } @@ -481,7 +482,7 @@ public static bool Compare(this ImageBase image1, ImageBase image2, double toler } } - errorMetrics.AvgError /= (double)(image1.Width * image1.Height); + errorMetrics.AvgError /= (double)image1.Width * image1.Height; errorMetrics.MaxError = Math.Sqrt(errorMetrics.MaxError); return errorMetrics.NumberOutliers <= percentOutliersAllowed * image1.Width * image1.Height; @@ -531,9 +532,9 @@ public static void Resize(this Image image, Image destImage, float newWidth, flo if (image.PixelFormat == PixelFormat.Gray_8bpp || image.PixelFormat == PixelFormat.RGB_24bpp) { int stride = 4 * ((image.Width * 3 + 3) / 2); // Rounding to nearest word boundary - using Image tmpImage = new Image(image.Width, image.Height, stride, PixelFormat.BGR_24bpp); + using var tmpImage = new Image(image.Width, image.Height, stride, PixelFormat.BGR_24bpp); image.CopyTo(tmpImage); - using Image resizedImage = new Image((int)newWidth, (int)newHeight, PixelFormat.BGR_24bpp); + using var resizedImage = new Image((int)newWidth, (int)newHeight, PixelFormat.BGR_24bpp); tmpImage.Resize(resizedImage, newWidth, newHeight, mode); destImage.CopyFrom(resizedImage); return; @@ -588,7 +589,7 @@ public static void Resize(this Image image, Image destImage, float newWidth, flo /// Returns a new image resized to the specified width/height. 
public static Image Resize(this Image image, int finalWidth, int finalHeight, SamplingMode samplingMode = SamplingMode.Bilinear) { - Image destImage = new Image(finalWidth, finalHeight, image.PixelFormat); + var destImage = new Image(finalWidth, finalHeight, image.PixelFormat); image.Resize(destImage, finalWidth, finalHeight, samplingMode); return destImage; } @@ -603,12 +604,8 @@ public static Image Resize(this Image image, int finalWidth, int finalHeight, Sa /// Rotated image. public static Image Rotate(this Image image, float angleInDegrees, SamplingMode mode, RotationFitMode fit = RotationFitMode.Tight) { - int rotatedWidth; - int rotatedHeight; - float originx; - float originy; - DetermineRotatedWidthHeight(image.Width, image.Height, angleInDegrees, fit, out rotatedWidth, out rotatedHeight, out originx, out originy); - Image rotatedImage = new Image(rotatedWidth, rotatedHeight, image.PixelFormat); + DetermineRotatedWidthHeight(image.Width, image.Height, angleInDegrees, fit, out int rotatedWidth, out int rotatedHeight, out _, out _); + var rotatedImage = new Image(rotatedWidth, rotatedHeight, image.PixelFormat); image.Rotate(rotatedImage, angleInDegrees, mode, fit); return rotatedImage; } @@ -635,10 +632,7 @@ public static void Rotate(this Image image, Image destImage, float angleInDegree "Convert to a supported format such as 8bpp grayscale or 24/32bpp color first."); } - int rotatedWidth; - int rotatedHeight; - float originx, originy; - DetermineRotatedWidthHeight(image.Width, image.Height, angleInDegrees, fit, out rotatedWidth, out rotatedHeight, out originx, out originy); + DetermineRotatedWidthHeight(image.Width, image.Height, angleInDegrees, fit, out int rotatedWidth, out int rotatedHeight, out float originx, out float originy); if (rotatedWidth != destImage.Width || rotatedHeight != destImage.Height) { @@ -657,9 +651,9 @@ public static void Rotate(this Image image, Image destImage, float angleInDegree if (image.PixelFormat == PixelFormat.Gray_8bpp || image.PixelFormat == PixelFormat.RGB_24bpp) { int stride = 4 * ((image.Width * 3 + 3) / 2); // Rounding to nearest word boundary - using Image tmpImage = new Image(image.Width, image.Height, stride, PixelFormat.BGR_24bpp); + using var tmpImage = new Image(image.Width, image.Height, stride, PixelFormat.BGR_24bpp); image.CopyTo(tmpImage); - using Image rotatedImage = new Image(rotatedWidth, rotatedHeight, PixelFormat.BGR_24bpp); + using var rotatedImage = new Image(rotatedWidth, rotatedHeight, PixelFormat.BGR_24bpp); tmpImage.Rotate(rotatedImage, angleInDegrees, mode, fit); destImage.CopyFrom(rotatedImage); return; @@ -711,57 +705,93 @@ public static void Rotate(this Image image, Image destImage, float angleInDegree /// The top of the region to crop. /// The width of the region to crop. /// The height of the region to crop. + /// An optional parameter indicating whether to clip the region (by default false). /// The cropped image. - public static Image Crop(this Image image, int left, int top, int width, int height) + public static Image Crop(this Image image, int left, int top, int width, int height, bool clip = false) => + image.Crop(new Rectangle(left, top, width, height), clip); + + /// + /// Creates a copy of the image cropped to the specified rectangle. + /// + /// Image to crop. + /// The rectangle to crop. + /// An optional parameter indicating whether to clip the rectangle (by default false). + /// The cropped image. 
+ public static Image Crop(this Image image, Rectangle rectangle, bool clip = false) { - Image croppedImage = new Image(width, height, image.PixelFormat); - image.Crop(croppedImage, left, top, width, height); - return croppedImage; + var actualRectangle = clip ? GetImageSizeClippedRectangle(rectangle, image.Width, image.Height) : rectangle; + if (actualRectangle.IsEmpty) + { + return null; + } + else + { + var croppedImage = new Image(actualRectangle.Width, actualRectangle.Height, image.PixelFormat); + image.Crop(croppedImage, actualRectangle, clip: false); + return croppedImage; + } } + /// + /// Creates a copy of the image cropped to the specified rectangle. + /// + /// Image to crop. + /// Destination image that cropped area is copied to. + /// The left of the rectangle to crop. + /// The top of the rectangle to crop. + /// The width of the rectangle to crop. + /// The height of the rectangle to crop. + /// An optional parameter indicating whether to clip the region (by default false). + public static void Crop(this Image image, Image croppedImage, int left, int top, int width, int height, bool clip = false) => + image.Crop(croppedImage, new Rectangle(left, top, width, height), clip); + /// /// Creates a copy of the image cropped to the specified dimensions. /// /// Image to crop. /// Destination image that cropped area is copied to. - /// The left of the region to crop. - /// The top of the region to crop. - /// The width of the region to crop. - /// The height of the region to crop. - /// The cropped image. - public static Image Crop(this Image image, Image croppedImage, int left, int top, int width, int height) + /// The rectangle to crop. + /// An optional parameter indicating whether to clip the region (by default false). + public static void Crop(this Image image, Image croppedImage, Rectangle rectangle, bool clip = false) { if (croppedImage.PixelFormat != image.PixelFormat) { throw new ArgumentOutOfRangeException("croppedImage.PixelFormat", "destination image pixel format doesn't match source image pixel format"); } - if (croppedImage.Width < width) + var actualRectangle = clip ? 
GetImageSizeClippedRectangle(rectangle, image.Width, image.Height) : rectangle; + + if (actualRectangle.IsEmpty) + { + return; + } + + if (croppedImage.Width < actualRectangle.Width) { throw new ArgumentOutOfRangeException("croppedImage.Width", "destination image width is too small"); } - if (croppedImage.Height < height) + if (croppedImage.Height < actualRectangle.Height) { throw new ArgumentOutOfRangeException("croppedImage.Height", "destination image height is too small"); } - if ((left < 0) || (left >= image.Width)) + if ((actualRectangle.Left < 0) || (actualRectangle.Left >= image.Width)) { throw new ArgumentOutOfRangeException("left", "left is out of range"); } - if ((top < 0) || (top >= image.Height)) + if ((actualRectangle.Top < 0) || (actualRectangle.Top >= image.Height)) { throw new ArgumentOutOfRangeException("top", "top is out of range"); } - if ((width < 0) || ((left + width) > image.Width)) + if ((actualRectangle.Width < 0) || ((actualRectangle.Left + actualRectangle.Width) > image.Width)) { throw new ArgumentOutOfRangeException("width", "width is out of range"); } - if ((height < 0) || ((top + height) > image.Height)) + if ((actualRectangle.Height < 0) || ((actualRectangle.Top + actualRectangle.Height) > image.Height)) { throw new ArgumentOutOfRangeException("height", "height is out of range"); } @@ -773,14 +803,14 @@ public static Image Crop(this Image image, Image croppedImage, int left, int top int bytesPerPixel = image.BitsPerPixel / 8; // Compute the number of bytes in each line of the crop region - int copyLength = width * bytesPerPixel; + int copyLength = actualRectangle.Width * bytesPerPixel; // Start at top-left of region to crop - byte* src = (byte*)image.ImageData.ToPointer() + (top * image.Stride) + (left * bytesPerPixel); + byte* src = (byte*)image.ImageData.ToPointer() + (actualRectangle.Top * image.Stride) + (actualRectangle.Left * bytesPerPixel); byte* dst = (byte*)croppedImage.ImageData.ToPointer(); // Copy line by line - for (int i = 0; i < height; i++) + for (int i = 0; i < actualRectangle.Height; i++) { Buffer.MemoryCopy(src, dst, copyLength, copyLength); @@ -788,92 +818,120 @@ public static Image Crop(this Image image, Image croppedImage, int left, int top dst += croppedImage.Stride; } } - - return croppedImage; } /// /// Creates a copy of the depth image cropped to the specified dimensions. /// - /// Depth image to crop. + /// Depth image to crop. /// The left of the region to crop. /// The top of the region to crop. /// The width of the region to crop. /// The height of the region to crop. + /// An optional parameter indicating whether to clip the region (by default false). + /// The cropped depth image. + public static DepthImage Crop(this DepthImage depthImage, int left, int top, int width, int height, bool clip = false) => + depthImage.Crop(new Rectangle(left, top, width, height), clip); + + /// + /// Creates a copy of the depth image cropped to the specified rectangle. + /// + /// Depth image to crop. + /// The rectangle region to crop. + /// An optional parameter indicating whether to clip the rectangle to the image boundaries (by default false). /// The cropped depth image. - public static DepthImage Crop(this DepthImage image, int left, int top, int width, int height) + public static DepthImage Crop(this DepthImage depthImage, Rectangle rectangle, bool clip = false) { - DepthImage croppedImage = new DepthImage(width, height, image.Stride); - image.Crop(croppedImage, left, top, width, height); - return croppedImage; + var actualRectangle = clip ? 
GetImageSizeClippedRectangle(rectangle, depthImage.Width, depthImage.Height) : rectangle; + var croppedDepthImage = new DepthImage(actualRectangle.Width, actualRectangle.Height, depthImage.Stride); + depthImage.Crop(croppedDepthImage, actualRectangle, clip: false); + return croppedDepthImage; } /// - /// Creates a copy of the image cropped to the specified dimensions. + /// Creates a copy of the depth image cropped to the specified dimensions. /// - /// Image to crop. - /// Destination image that cropped area is copied to. + /// Image to crop. + /// Destination image that cropped area is copied to. /// The left of the region to crop. /// The top of the region to crop. /// The width of the region to crop. /// The height of the region to crop. - public static void Crop(this DepthImage image, DepthImage croppedImage, int left, int top, int width, int height) + /// An optional parameter indicating whether to clip the region (by default false). + public static void Crop(this DepthImage depthImage, DepthImage croppedDepthImage, int left, int top, int width, int height, bool clip = false) => + depthImage.Crop(croppedDepthImage, new Rectangle(left, top, width, height), clip); + + /// + /// Creates a copy of the depth image cropped to the specified rectangle. + /// + /// Image to crop. + /// Destination image that cropped area is copied to. + /// The rectangle region to crop. + /// An optional parameter indicating whether to clip the rectangle to the image boundaries (by default false). + public static void Crop(this DepthImage depthImage, DepthImage croppedDepthImage, Rectangle rectangle, bool clip = false) { - if (croppedImage.PixelFormat != image.PixelFormat) + if (croppedDepthImage.PixelFormat != depthImage.PixelFormat) { - throw new ArgumentOutOfRangeException("croppedImage.PixelFormat", "destination image pixel format doesn't match source image pixel format"); + throw new ArgumentOutOfRangeException($"{nameof(croppedDepthImage)}.PixelFormat", "destination image pixel format doesn't match source image pixel format"); } - if (croppedImage.Width < width) + var actualRectangle = clip ? 
GetImageSizeClippedRectangle(rectangle, depthImage.Width, depthImage.Height) : rectangle; + + if (actualRectangle.IsEmpty) { - throw new ArgumentOutOfRangeException("croppedImage.Width", "destination image width is too small"); + return; } - if (croppedImage.Height < height) + if (croppedDepthImage.Width < actualRectangle.Width) { - throw new ArgumentOutOfRangeException("croppedImage.Height", "destination image height is too small"); + throw new ArgumentOutOfRangeException($"{nameof(croppedDepthImage)}.Width", "destination image width is too small"); } - if ((left < 0) || (left > (image.Width - 1))) + if (croppedDepthImage.Height < actualRectangle.Height) + { + throw new ArgumentOutOfRangeException($"{nameof(croppedDepthImage)}.Height", "destination image height is too small"); + } + + if ((actualRectangle.Left < 0) || (actualRectangle.Left >= depthImage.Width)) { throw new ArgumentOutOfRangeException("left", "left is out of range"); } - if ((top < 0) || (top > (image.Height - 1))) + if ((actualRectangle.Top < 0) || (actualRectangle.Top >= depthImage.Height)) { throw new ArgumentOutOfRangeException("top", "top is out of range"); } - if ((width < 0) || ((left + width) > image.Width)) + if ((actualRectangle.Width < 0) || ((actualRectangle.Left + actualRectangle.Width) > depthImage.Width)) { throw new ArgumentOutOfRangeException("width", "width is out of range"); } - if ((height < 0) || ((top + height) > image.Height)) + if ((actualRectangle.Height < 0) || ((actualRectangle.Top + actualRectangle.Height) > depthImage.Height)) { throw new ArgumentOutOfRangeException("height", "height is out of range"); } // Cropped image will be returned as a new image - original (this) image is not modified - System.Diagnostics.Debug.Assert(croppedImage.ImageData != IntPtr.Zero, "Unexpected empty image"); + System.Diagnostics.Debug.Assert(croppedDepthImage.ImageData != IntPtr.Zero, "Unexpected empty image"); unsafe { - int bytesPerPixel = image.BitsPerPixel / 8; + int bytesPerPixel = depthImage.BitsPerPixel / 8; // Compute the number of bytes in each line of the crop region - int copyLength = width * bytesPerPixel; + int copyLength = actualRectangle.Width * bytesPerPixel; // Start at top-left of region to crop - byte* src = (byte*)image.ImageData.ToPointer() + (top * image.Stride) + (left * bytesPerPixel); - byte* dst = (byte*)croppedImage.ImageData.ToPointer(); + byte* src = (byte*)depthImage.ImageData.ToPointer() + (actualRectangle.Top * depthImage.Stride) + (actualRectangle.Left * bytesPerPixel); + byte* dst = (byte*)croppedDepthImage.ImageData.ToPointer(); // Copy line by line - for (int i = 0; i < height; i++) + for (int i = 0; i < actualRectangle.Height; i++) { Buffer.MemoryCopy(src, dst, copyLength, copyLength); - src += image.Stride; - dst += croppedImage.Stride; + src += depthImage.Stride; + dst += croppedDepthImage.Stride; } } } @@ -1063,8 +1121,8 @@ public static Color AverageColor(this Image image) /// A color with the average RGB values of the region. 
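A small sketch of the new rectangle-based Crop overloads with clipping, shown for a plain Image (the DepthImage overloads above behave analogously). Illustrative only, not part of the diff; the image dimensions and rectangle are arbitrary examples.

    using System.Drawing;
    using Microsoft.Psi.Imaging;

    public static class CropExample
    {
        public static Image CropWithClipping(Image image)
        {
            // A region of interest that extends past the right/bottom border: with clip = true the
            // rectangle is clipped to the image bounds instead of throwing. For a 640 x 480 image,
            // this rectangle is clipped to (600, 440, 40, 40); Crop returns null if the clipped
            // rectangle is empty.
            return image.Crop(new Rectangle(600, 440, 100, 100), clip: true);
        }
    }
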
public static Color AverageColor(this Image image, int left, int top, int width, int height) { - var colorF = image.AverageColorF(left, top, width, height); - return Color.FromArgb((byte)Math.Round(255 * colorF.R), (byte)Math.Round(255 * colorF.G), (byte)Math.Round(255 * colorF.B)); + var (r, g, b) = image.AverageColorF(left, top, width, height); + return Color.FromArgb((byte)Math.Round(255 * r), (byte)Math.Round(255 * g), (byte)Math.Round(255 * b)); } /// @@ -1115,6 +1173,7 @@ public static (double R, double G, double B) AverageColorF(this Image image, int unsafe { byte* ptrFirstPixel = (byte*)image.ImageData.ToPointer(); + var bytesPerPixel = image.BitsPerPixel / 8; for (var y = top; y < (top + height); y++) { @@ -1122,7 +1181,7 @@ public static (double R, double G, double B) AverageColorF(this Image image, int for (var x = left; x < (left + width); x++) { - byte* ptrCurrentPixel = ptrCurrentRow + x * image.BitsPerPixel / 8; + byte* ptrCurrentPixel = ptrCurrentRow + x * bytesPerPixel; switch (image.PixelFormat) { @@ -1338,7 +1397,7 @@ public static void DrawCircle(this Image image, Point p0, int radius, Color colo /// Color to use when drawing text. Optional. /// Name of font to use. Optional. /// Size of font. Optional. - public static void DrawText(this Image image, string str, Point p0, Color color = default(Color), string font = "Arial", float fontSize = 24.0f) + public static void DrawText(this Image image, string str, Point p0, Color color = default, string font = "Arial", float fontSize = 24.0f) { if (image.PixelFormat == PixelFormat.Gray_16bpp || image.PixelFormat == PixelFormat.RGBA_64bpp) { @@ -1369,9 +1428,9 @@ public static void DrawText(this Image image, string str, Point p0, Color color font ??= "Arial"; using Bitmap bm = image.ToBitmap(false); using var graphics = Graphics.FromImage(bm); - using Font drawFont = new Font(font, fontSize); - using SolidBrush drawBrush = new SolidBrush(color); - using StringFormat drawFormat = new StringFormat(); + using var drawFont = new Font(font, fontSize); + using var drawBrush = new SolidBrush(color); + using var drawFormat = new StringFormat(); drawFormat.FormatFlags = 0; graphics.DrawString(str, drawFont, drawBrush, p0.X, p0.Y, drawFormat); } @@ -1417,10 +1476,10 @@ public static void DrawText(this Image image, string str, Point p0, Color backgr font ??= "Arial"; using Bitmap bm = image.ToBitmap(false); using var graphics = Graphics.FromImage(bm); - using Font drawFont = new Font(font, fontSize); - using SolidBrush textBrush = new SolidBrush(textColor); - using SolidBrush backgroundBrush = new SolidBrush(backgroundColor); - using StringFormat drawFormat = new StringFormat(); + using var drawFont = new Font(font, fontSize); + using var textBrush = new SolidBrush(textColor); + using var backgroundBrush = new SolidBrush(backgroundColor); + using var drawFormat = new StringFormat(); drawFormat.FormatFlags = 0; SizeF textSize = graphics.MeasureString(str, drawFont); @@ -1537,7 +1596,7 @@ public static void CopyTo(this Image srcImage, Image destImage, Image maskImage) throw new System.Exception(Image.ExceptionDescriptionSourceDestImageMismatch); } - Rectangle srcRect = new Rectangle(0, 0, srcImage.Width - 1, srcImage.Height - 1); + var srcRect = new Rectangle(0, 0, srcImage.Width - 1, srcImage.Height - 1); srcImage.CopyTo(srcRect, destImage, new Point(0, 0), maskImage); } @@ -1578,11 +1637,6 @@ public static void CopyTo(this Image srcImage, Rectangle srcRect, Image destImag throw new ArgumentOutOfRangeException("destImage.PixelFormat", 
"destination image pixel format doesn't match source image pixel format"); } - if (srcImage.Width != destImage.Width || srcImage.Height != destImage.Height) - { - throw new System.Exception(Image.ExceptionDescriptionSourceDestImageMismatch); - } - srcImage.CopyTo(srcRect, destImage, destTopLeftPoint, null); } @@ -1658,9 +1712,9 @@ public static void CopyTo(this Image srcImage, Rectangle srcRect, Image destImag PixelFormat srcFormat = srcImage.PixelFormat; PixelFormat dstFormat = destImage.PixelFormat; - System.IntPtr sourceBuffer = srcImage.ImageData; - System.IntPtr destBuffer = destImage.ImageData; - System.IntPtr maskBuffer = (maskImage != null) ? maskImage.ImageData : System.IntPtr.Zero; + var sourceBuffer = srcImage.ImageData; + var destBuffer = destImage.ImageData; + var maskBuffer = (maskImage != null) ? maskImage.ImageData : IntPtr.Zero; unsafe { int srcBytesPerPixel = srcFormat.GetBytesPerPixel(); @@ -1815,7 +1869,7 @@ public static partial class Operators /// Returns an new image with the inverted results. public static Image Invert(this Image srcImage) { - Image invertedImage = new Image(srcImage.Width, srcImage.Height, srcImage.PixelFormat); + var invertedImage = new Image(srcImage.Width, srcImage.Height, srcImage.PixelFormat); srcImage.Invert(invertedImage); return invertedImage; } @@ -1834,7 +1888,7 @@ public static void Invert(this Image srcImage, Image destImage) if (srcImage.Width != destImage.Width || srcImage.Height != destImage.Height) { - throw new System.Exception(Image.ExceptionDescriptionSourceDestImageMismatch); + throw new Exception(Image.ExceptionDescriptionSourceDestImageMismatch); } unsafe @@ -1903,72 +1957,51 @@ public static void Invert(this Image srcImage, Image destImage) /// Color to clear to. public static void Clear(this Image image, Color clr) { - unsafe + void ClearFast(byte b) { - int srcBytesPerPixel = image.PixelFormat.GetBytesPerPixel(); - byte* srcRow = (byte*)image.ImageData.ToPointer(); - for (int i = 0; i < image.Height; i++) + unsafe { - byte* srcCol = srcRow; - for (int j = 0; j < image.Width; j++) - { - switch (image.PixelFormat) - { - case PixelFormat.Gray_8bpp: - srcCol[0] = Operators.Rgb2Gray(clr.R, clr.G, clr.B); - break; - - case PixelFormat.Gray_16bpp: - ((ushort*)srcCol)[0] = Operators.Rgb2Gray( - (ushort)((clr.R << 8) | clr.R), - (ushort)((clr.G << 8) | clr.G), - (ushort)((clr.B << 8) | clr.B)); - break; - - case PixelFormat.BGR_24bpp: - srcCol[0] = clr.B; - srcCol[1] = clr.G; - srcCol[2] = clr.R; - break; + Unsafe.InitBlockUnaligned(image.ImageData.ToPointer(), b, (uint)image.Size); + } + } - case PixelFormat.BGRX_32bpp: - srcCol[0] = clr.B; - srcCol[1] = clr.G; - srcCol[2] = clr.R; - srcCol[3] = 255; - break; + switch (image.PixelFormat) + { + case PixelFormat.Gray_8bpp: + case PixelFormat.Gray_16bpp: + ClearFast(Rgb2Gray(clr.R, clr.G, clr.B)); + return; - case PixelFormat.BGRA_32bpp: - srcCol[0] = clr.B; - srcCol[1] = clr.G; - srcCol[2] = clr.R; - srcCol[3] = clr.A; - break; + case PixelFormat.BGR_24bpp: + if (clr.R == clr.G && clr.G == clr.B) + { + ClearFast(clr.R); + return; + } - case PixelFormat.RGB_24bpp: - srcCol[0] = clr.R; - srcCol[1] = clr.G; - srcCol[2] = clr.B; - break; + break; - case PixelFormat.RGBA_64bpp: - ((ushort*)srcCol)[0] = (ushort)((clr.R << 8) | clr.R); - ((ushort*)srcCol)[1] = (ushort)((clr.G << 8) | clr.G); - ((ushort*)srcCol)[2] = (ushort)((clr.B << 8) | clr.B); - ((ushort*)srcCol)[3] = (ushort)((clr.A << 8) | clr.A); - break; + case PixelFormat.BGRX_32bpp: + if (clr.R == clr.G && clr.G == clr.B && clr.B 
== 255) + { + ClearFast(255); + return; + } - case PixelFormat.Undefined: - default: - throw new ArgumentException(Image.ExceptionDescriptionUnexpectedPixelFormat); - } + break; - srcCol += srcBytesPerPixel; + case PixelFormat.BGRA_32bpp: + case PixelFormat.RGBA_64bpp: + if (clr.R == clr.G && clr.G == clr.B && clr.B == clr.A) + { + ClearFast(clr.R); + return; } - srcRow += image.Stride; - } + break; } + + ClearSlow(image, clr); } /// @@ -1979,7 +2012,7 @@ public static void Clear(this Image image, Color clr) /// Returns a new grayscale image containing the color from the specified channel in the original source image. public static Image ExtractChannel(this Image image, int channel) { - Image destImage = new Image(image.Width, image.Height, PixelFormat.Gray_8bpp); + var destImage = new Image(image.Width, image.Height, PixelFormat.Gray_8bpp); image.ExtractChannel(destImage, channel); return destImage; } @@ -2038,6 +2071,122 @@ public static void ExtractChannel(this Image image, Image destImage, int channel } } } + + /// + /// Clears each color component in an image to the specified color. + /// + /// Image to clear. + /// Color to clear to. + private static void ClearSlow(Image image, Color clr) + { + unsafe + { + switch (image.PixelFormat) + { + case PixelFormat.BGR_24bpp: + int srcBytesPerPixel = image.PixelFormat.GetBytesPerPixel(); + byte* srcRow = (byte*)image.ImageData.ToPointer(); + for (int i = 0; i < image.Height; i++) + { + byte* srcCol = srcRow; + for (int j = 0; j < image.Width; j++) + { + srcCol[0] = clr.B; + srcCol[1] = clr.G; + srcCol[2] = clr.R; + srcCol += srcBytesPerPixel; + } + + srcRow += image.Stride; + } + + break; + + case PixelFormat.BGRX_32bpp: + srcBytesPerPixel = image.PixelFormat.GetBytesPerPixel(); + srcRow = (byte*)image.ImageData.ToPointer(); + for (int i = 0; i < image.Height; i++) + { + byte* srcCol = srcRow; + for (int j = 0; j < image.Width; j++) + { + srcCol[0] = clr.B; + srcCol[1] = clr.G; + srcCol[2] = clr.R; + srcCol[3] = 255; + srcCol += srcBytesPerPixel; + } + + srcRow += image.Stride; + } + + break; + + case PixelFormat.BGRA_32bpp: + srcBytesPerPixel = image.PixelFormat.GetBytesPerPixel(); + srcRow = (byte*)image.ImageData.ToPointer(); + for (int i = 0; i < image.Height; i++) + { + byte* srcCol = srcRow; + for (int j = 0; j < image.Width; j++) + { + srcCol[0] = clr.B; + srcCol[1] = clr.G; + srcCol[2] = clr.R; + srcCol[3] = clr.A; + srcCol += srcBytesPerPixel; + } + + srcRow += image.Stride; + } + + break; + + case PixelFormat.RGB_24bpp: + srcBytesPerPixel = image.PixelFormat.GetBytesPerPixel(); + srcRow = (byte*)image.ImageData.ToPointer(); + for (int i = 0; i < image.Height; i++) + { + byte* srcCol = srcRow; + for (int j = 0; j < image.Width; j++) + { + srcCol[0] = clr.R; + srcCol[1] = clr.G; + srcCol[2] = clr.B; + srcCol += srcBytesPerPixel; + } + + srcRow += image.Stride; + } + + break; + + case PixelFormat.RGBA_64bpp: + srcBytesPerPixel = image.PixelFormat.GetBytesPerPixel(); + srcRow = (byte*)image.ImageData.ToPointer(); + for (int i = 0; i < image.Height; i++) + { + byte* srcCol = srcRow; + for (int j = 0; j < image.Width; j++) + { + ((ushort*)srcCol)[0] = (ushort)((clr.R << 8) | clr.R); + ((ushort*)srcCol)[1] = (ushort)((clr.G << 8) | clr.G); + ((ushort*)srcCol)[2] = (ushort)((clr.B << 8) | clr.B); + ((ushort*)srcCol)[3] = (ushort)((clr.A << 8) | clr.A); + srcCol += srcBytesPerPixel; + } + + srcRow += image.Stride; + } + + break; + + case PixelFormat.Undefined: + default: + throw new 
ArgumentException(Image.ExceptionDescriptionUnexpectedPixelFormat); + } + } + } } /// @@ -2055,7 +2204,7 @@ public static partial class Operators /// The thresholded image. public static Image Threshold(this Image image, int threshold, int maxvalue, Threshold type) { - Image thresholdedImage = new Image(image.Width, image.Height, image.PixelFormat); + var thresholdedImage = new Image(image.Width, image.Height, image.PixelFormat); image.Threshold(thresholdedImage, threshold, maxvalue, type); return thresholdedImage; } @@ -2240,7 +2389,7 @@ public static void Threshold(this Image srcImage, Image destImage, int threshold /// Difference image. public static Image AbsDiff(this Image imageA, Image imageB) { - Image diffImage = new Image(imageA.Width, imageA.Height, imageA.PixelFormat); + var diffImage = new Image(imageA.Width, imageA.Height, imageA.PixelFormat); imageA.AbsDiff(imageB, diffImage); return diffImage; } @@ -2347,5 +2496,148 @@ public static void AbsDiff(this Image imageA, Image imageB, Image destImage) } } } + + /// + /// Convolves an image with a specified kernel. + /// + /// The source image. + /// The kernel to convolve the image with. + /// An image contained the convolution results. + public static Image Convolve(this Image image, int[,] kernel) + { + var destination = new Image(image.Width, image.Height, image.PixelFormat); + image.Convolve(destination, kernel); + return destination; + } + + /// + /// Convolves an image with a specified kernel into a specified destination image. + /// + /// The source image. + /// The destination image. + /// The kernel to convolve the image with. + public static void Convolve(this Image image, Image destination, int[,] kernel) + { + if (image.PixelFormat != destination.PixelFormat) + { + throw new ArgumentOutOfRangeException("destination.PixelFormat", "Destination image pixel format doesn't match source image pixel format."); + } + + if (image.PixelFormat != PixelFormat.Gray_8bpp && + image.PixelFormat != PixelFormat.Gray_16bpp) + { + throw new NotSupportedException($"Currently the {nameof(Convolve)} operator only supports grayscale formats."); + } + + if (image.Width != destination.Width || image.Height != destination.Height) + { + throw new ArgumentException("Images sizes/types don't match"); + } + + int kernelHeightHalf = kernel.GetLength(0) / 2; + int kernelWidthHalf = kernel.GetLength(1) / 2; + + unsafe + { + int bytesPerPixel = image.PixelFormat.GetBytesPerPixel(); + byte* srcStart = (byte*)image.ImageData.ToPointer(); + byte* dst = (byte*)destination.ImageData.ToPointer(); + for (int i = 0; i < image.Height; i++) + { + byte* dstCol = dst; + for (int j = 0; j < image.Width; j++) + { + var accumulator = 0; + var count = 0; + for (int ki = 0; ki < kernel.GetLength(0); ki++) + { + var row = i - kernelHeightHalf + ki; + if ((row >= 0) && (row < image.Height)) + { + var srcRow = srcStart + image.Stride * row; + for (int kj = 0; kj < kernel.GetLength(1); kj++) + { + var col = j - kernelWidthHalf + kj; + if ((col >= 0) && (col < image.Width)) + { + var srcCol = srcRow + col * bytesPerPixel; + switch (image.PixelFormat) + { + case PixelFormat.Gray_8bpp: + accumulator += srcCol[0] * kernel[ki, kj]; + count += 1; + break; + case PixelFormat.Gray_16bpp: + accumulator += ((ushort*)srcCol)[0] * kernel[ki, kj]; + count += 1; + break; + } + } + } + } + } + + switch (destination.PixelFormat) + { + case PixelFormat.Gray_16bpp: + ((ushort*)dstCol)[0] = (ushort)(accumulator / count); + break; + case PixelFormat.Gray_8bpp: + dstCol[0] = (byte)(accumulator 
/ count); + break; + } + + dstCol += bytesPerPixel; + } + + dst += destination.Stride; + } + } + } + + /// + /// Clips a rectangle based on the size of an image. + /// + /// The rectangle to clip. + /// The image width. + /// The image height. + /// The clipped rectangle. + internal static Rectangle GetImageSizeClippedRectangle(Rectangle rectangle, int imageWidth, int imageHeight) + { + if ((rectangle.Left >= imageWidth) || + (rectangle.Top >= imageHeight) || + (rectangle.Width < 0) || + (rectangle.Height < 0)) + { + return Rectangle.Empty; + } + + var actualLeft = rectangle.Left; + var actualRight = rectangle.Left + rectangle.Width; + var actualTop = rectangle.Top; + var actualBottom = rectangle.Top + rectangle.Height; + + if (actualLeft < 0) + { + actualLeft = 0; + } + + if (actualTop < 0) + { + actualTop = 0; + } + + if (actualRight > imageWidth) + { + actualRight = imageWidth; + } + + if (actualBottom > imageHeight) + { + actualBottom = imageHeight; + } + + return new Rectangle(actualLeft, actualTop, actualRight - actualLeft, actualBottom - actualTop); + } } } \ No newline at end of file diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/StreamOperators.cs b/Sources/Imaging/Microsoft.Psi.Imaging/StreamOperators.cs index ac7dca888..ddc86a954 100644 --- a/Sources/Imaging/Microsoft.Psi.Imaging/StreamOperators.cs +++ b/Sources/Imaging/Microsoft.Psi.Imaging/StreamOperators.cs @@ -46,9 +46,9 @@ public static IProducer> ToImage(this IProducer return source.Process, Shared>( (sharedDepthImage, envelope, emitter) => { - using var sharedImage = sharedDepthImageAllocator(sharedDepthImage.Resource.Width, sharedDepthImage.Resource.Height); - sharedImage.Resource.CopyFrom(sharedDepthImage.Resource); - emitter.Post(sharedImage, envelope.OriginatingTime); + using var sharedImage = sharedDepthImageAllocator(sharedDepthImage.Resource.Width, sharedDepthImage.Resource.Height); + sharedImage.Resource.CopyFrom(sharedDepthImage.Resource); + emitter.Post(sharedImage, envelope.OriginatingTime); }, deliveryPolicy); } @@ -61,9 +61,29 @@ public static IProducer> ToImage(this IProducer /// An optional delivery policy. /// Optional image allocator for creating new shared image. /// The resulting stream. 
- public static IProducer> ToPixelFormat(this IProducer> source, PixelFormat pixelFormat, DeliveryPolicy> deliveryPolicy = null, Func> sharedImageAllocator = null) - { - return source.PipeTo(new ToPixelFormat(source.Out.Pipeline, pixelFormat, sharedImageAllocator), deliveryPolicy); + public static IProducer> Convert(this IProducer> source, PixelFormat pixelFormat, DeliveryPolicy> deliveryPolicy = null, Func> sharedImageAllocator = null) + { + sharedImageAllocator ??= (width, height, pixelFormat) => ImagePool.GetOrCreate(width, height, pixelFormat); + return source.Process, Shared>( + (sharedImage, envelope, emitter) => + { + // if the image is null, post null + if (sharedImage == null) + { + emitter.Post(null, envelope.OriginatingTime); + } + else if (pixelFormat == sharedImage.Resource.PixelFormat) + { + // o/w if image is already in the requested format, shortcut the conversion + emitter.Post(sharedImage, envelope.OriginatingTime); + } + else + { + using var image = sharedImageAllocator(sharedImage.Resource.Width, sharedImage.Resource.Height, pixelFormat); + sharedImage.Resource.CopyTo(image.Resource); + emitter.Post(image, envelope.OriginatingTime); + } + }, deliveryPolicy); } /// @@ -81,50 +101,148 @@ public static IProducer> Transform(this IProducer> s } /// - /// Crops a shared depth image using the specified rectangle. + /// Crops a shared image using the specified rectangle. /// - /// Source of image and rectangle samples. + /// Source of image and rectangle messages. + /// An optional parameter indicating whether to clip the region (by default false). /// An optional delivery policy. /// Optional image allocator to create new shared image. /// Returns a producer generating new cropped image samples. - public static IProducer> Crop(this IProducer<(Shared, Rectangle)> source, DeliveryPolicy<(Shared, Rectangle)> deliveryPolicy = null, Func> sharedImageAllocator = null) + public static IProducer> Crop( + this IProducer<(Shared, Rectangle)> source, + bool clip = false, + DeliveryPolicy<(Shared, Rectangle)> deliveryPolicy = null, + Func> sharedImageAllocator = null) { sharedImageAllocator ??= ImagePool.GetOrCreate; return source.Process<(Shared, Rectangle), Shared>( (tupleOfSharedImageAndRectangle, envelope, emitter) => { - using var croppedSharedImage = sharedImageAllocator(tupleOfSharedImageAndRectangle.Item2.Width, tupleOfSharedImageAndRectangle.Item2.Height, tupleOfSharedImageAndRectangle.Item1.Resource.PixelFormat); - tupleOfSharedImageAndRectangle.Item1.Resource.Crop( - croppedSharedImage.Resource, - tupleOfSharedImageAndRectangle.Item2.Left, - tupleOfSharedImageAndRectangle.Item2.Top, - tupleOfSharedImageAndRectangle.Item2.Width, - tupleOfSharedImageAndRectangle.Item2.Height); - emitter.Post(croppedSharedImage, envelope.OriginatingTime); + (var sharedImage, var rectangle) = tupleOfSharedImageAndRectangle; + var actualRectangle = clip ? GetImageSizeClippedRectangle(rectangle, sharedImage.Resource.Width, sharedImage.Resource.Height) : rectangle; + if (actualRectangle.IsEmpty) + { + emitter.Post(null, envelope.OriginatingTime); + } + else + { + using var croppedSharedImage = sharedImageAllocator(actualRectangle.Width, actualRectangle.Height, sharedImage.Resource.PixelFormat); + sharedImage.Resource.Crop(croppedSharedImage.Resource, actualRectangle, clip: false); + emitter.Post(croppedSharedImage, envelope.OriginatingTime); + } }, deliveryPolicy); } /// - /// Crops a shared image using the specified rectangle. + /// Crops a shared image using the specified nullable rectangle. 
When no rectangle is specified, produces a null image. /// - /// Source of image and rectangle samples. + /// Source of image and rectangle messages. + /// An optional parameter indicating whether to clip the region (by default false). + /// An optional delivery policy. + /// Optional image allocator to create new shared image. + /// Returns a producer generating new cropped image samples. + public static IProducer> Crop( + this IProducer<(Shared, Rectangle?)> source, + bool clip = false, + DeliveryPolicy<(Shared, Rectangle?)> deliveryPolicy = null, + Func> sharedImageAllocator = null) + { + sharedImageAllocator ??= ImagePool.GetOrCreate; + return source.Process<(Shared, Rectangle?), Shared>( + (tupleOfSharedImageAndRectangle, envelope, emitter) => + { + (var sharedImage, var rectangle) = tupleOfSharedImageAndRectangle; + if (rectangle.HasValue) + { + var actualRectangle = clip ? GetImageSizeClippedRectangle(rectangle.Value, sharedImage.Resource.Width, sharedImage.Resource.Height) : rectangle.Value; + if (actualRectangle.IsEmpty) + { + emitter.Post(null, envelope.OriginatingTime); + } + else + { + using var croppedSharedImage = sharedImageAllocator(actualRectangle.Width, actualRectangle.Height, sharedImage.Resource.PixelFormat); + sharedImage.Resource.Crop(croppedSharedImage.Resource, actualRectangle, clip: false); + emitter.Post(croppedSharedImage, envelope.OriginatingTime); + } + } + else + { + emitter.Post(null, envelope.OriginatingTime); + } + }, deliveryPolicy); + } + + /// + /// Crops a shared depth image using the specified rectangle. + /// + /// Source of depth image and rectangle messages. + /// An optional parameter indicating whether to clip the region (by default false). /// An optional delivery policy. /// Optional image allocator to create new shared depth image. /// Returns a producer generating new cropped image samples. - public static IProducer> Crop(this IProducer<(Shared, Rectangle)> source, DeliveryPolicy<(Shared, Rectangle)> deliveryPolicy = null, Func> sharedDepthImageAllocator = null) + public static IProducer> Crop( + this IProducer<(Shared, Rectangle)> source, + bool clip = false, + DeliveryPolicy<(Shared, Rectangle)> deliveryPolicy = null, + Func> sharedDepthImageAllocator = null) { sharedDepthImageAllocator ??= DepthImagePool.GetOrCreate; return source.Process<(Shared, Rectangle), Shared>( - (tupleOfSharedImageAndRectangle, envelope, emitter) => + (tupleOfSharedDepthImageAndRectangle, envelope, emitter) => + { + (var sharedDepthImage, var rectangle) = tupleOfSharedDepthImageAndRectangle; + var actualRectangle = clip ? GetImageSizeClippedRectangle(rectangle, sharedDepthImage.Resource.Width, sharedDepthImage.Resource.Height) : rectangle; + if (actualRectangle.IsEmpty) + { + emitter.Post(null, envelope.OriginatingTime); + } + else + { + using var croppedSharedDepthImage = sharedDepthImageAllocator(actualRectangle.Width, actualRectangle.Height); + sharedDepthImage.Resource.Crop(croppedSharedDepthImage.Resource, actualRectangle, clip: false); + emitter.Post(croppedSharedDepthImage, envelope.OriginatingTime); + } + }, deliveryPolicy); + } + + /// + /// Crops a shared depth image using the specified nullable rectangle. When no rectangle is specified, produces a null image. + /// + /// Source of depth image and rectangle messages. + /// An optional parameter indicating whether to clip the region (by default false). + /// An optional delivery policy. + /// Optional image allocator to create new shared depth image. 
+ /// Returns a producer generating new cropped image samples. + public static IProducer> Crop( + this IProducer<(Shared, Rectangle?)> source, + bool clip = false, + DeliveryPolicy<(Shared, Rectangle?)> deliveryPolicy = null, + Func> sharedDepthImageAllocator = null) + { + sharedDepthImageAllocator ??= DepthImagePool.GetOrCreate; + return source.Process<(Shared, Rectangle?), Shared>( + (tupleOfSharedDepthImageAndRectangle, envelope, emitter) => { - using var croppedSharedImage = sharedDepthImageAllocator(tupleOfSharedImageAndRectangle.Item2.Width, tupleOfSharedImageAndRectangle.Item2.Height); - tupleOfSharedImageAndRectangle.Item1.Resource.Crop( - croppedSharedImage.Resource, - tupleOfSharedImageAndRectangle.Item2.Left, - tupleOfSharedImageAndRectangle.Item2.Top, - tupleOfSharedImageAndRectangle.Item2.Width, - tupleOfSharedImageAndRectangle.Item2.Height); - emitter.Post(croppedSharedImage, envelope.OriginatingTime); + (var sharedDepthImage, var rectangle) = tupleOfSharedDepthImageAndRectangle; + if (rectangle.HasValue) + { + var actualRectangle = clip ? GetImageSizeClippedRectangle(rectangle.Value, sharedDepthImage.Resource.Width, sharedDepthImage.Resource.Height) : rectangle.Value; + if (actualRectangle.IsEmpty) + { + emitter.Post(null, envelope.OriginatingTime); + } + else + { + using var croppedSharedDepthImage = sharedDepthImageAllocator(actualRectangle.Width, actualRectangle.Height); + sharedDepthImage.Resource.Crop(croppedSharedDepthImage.Resource, actualRectangle, clip: false); + emitter.Post(croppedSharedDepthImage, envelope.OriginatingTime); + } + } + else + { + emitter.Post(null, envelope.OriginatingTime); + } }, deliveryPolicy); } @@ -156,18 +274,6 @@ public static IProducer> Crop(this IProducer<(Shared - /// Converts a shared image to grayscale. - /// - /// Image producer to use as source images. - /// An optional delivery policy. - /// Optional image allocator to create new shared image. - /// Producers of grayscale images. - public static IProducer> ToGray(this IProducer> source, DeliveryPolicy> deliveryPolicy = null, Func> sharedImageAllocator = null) - { - return source.ToPixelFormat(PixelFormat.Gray_8bpp, deliveryPolicy, sharedImageAllocator); - } - /// /// Resizes a shared image. /// @@ -542,6 +648,33 @@ public static IProducer> Threshold(this IProducer> i } /// + /// Convolves the image with a specified kernel. + /// + /// The stream of images. + /// The kernel to use. + /// An optional delivery policy. + /// Optional image allocator to create new shared image. + /// A stream containing the results of the convolution. + public static IProducer> Convolve(this IProducer> image, int[,] kernel, DeliveryPolicy> deliveryPolicy = null, Func> sharedImageAllocator = null) + { + sharedImageAllocator ??= ImagePool.GetOrCreate; + return image.Process, Shared>( + (sharedSourceImage, envelope, emitter) => + { + if (sharedSourceImage == null) + { + emitter.Post(null, envelope.OriginatingTime); + } + else + { + using var destinationImage = sharedImageAllocator(sharedSourceImage.Resource.Width, sharedSourceImage.Resource.Height, sharedSourceImage.Resource.PixelFormat); + sharedSourceImage.Resource.Convolve(destinationImage.Resource, kernel); + emitter.Post(destinationImage, envelope.OriginatingTime); + } + }, deliveryPolicy); + } + + /// /// Encodes a shared image using a specified encoder component. /// /// A producer of images to encode. 
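A pipeline-level sketch, modeled on the Generators.Return pattern used in the unit tests later in this diff, that chains several of the new and revised stream operators: Crop with a nullable rectangle and clipping, Convert (which replaces the removed ToPixelFormat operator), Convolve with a simple box kernel, and EncodeGZip. Illustrative only, not part of the diff; the image size, rectangle, kernel, and pipeline name are arbitrary placeholders.

    using System;
    using System.Drawing;
    using Microsoft.Psi;
    using Microsoft.Psi.Imaging;

    public static class StreamOperatorsExample
    {
        public static void Run()
        {
            using var pipeline = Pipeline.Create("CropConvertConvolveEncode");
            using var sharedImage = ImagePool.GetOrCreate(320, 240, PixelFormat.BGR_24bpp);

            // 3 x 3 box kernel; the Convolve operator averages by the number of in-bounds samples.
            var boxKernel = new int[,] { { 1, 1, 1 }, { 1, 1, 1 }, { 1, 1, 1 } };

            Generators
                .Return(pipeline, (sharedImage, (Rectangle?)new Rectangle(100, 100, 400, 400)))
                .Crop(clip: true)                  // clipped to (100, 100, 220, 140) for a 320 x 240 image
                .Convert(PixelFormat.Gray_8bpp)    // Convolve currently supports only grayscale formats
                .Convolve(boxKernel)
                .EncodeGZip()                      // provided by the Microsoft.Psi.Imaging.Windows operators
                .Do(encoded => Console.WriteLine($"Encoded size: {encoded.Resource.Size} bytes"));

            pipeline.Run();
        }
    }
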
diff --git a/Sources/Imaging/Microsoft.Psi.Imaging/ToPixelFormat.cs b/Sources/Imaging/Microsoft.Psi.Imaging/ToPixelFormat.cs deleted file mode 100644 index 23abbcf0a..000000000 --- a/Sources/Imaging/Microsoft.Psi.Imaging/ToPixelFormat.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. - -namespace Microsoft.Psi.Imaging -{ - using Microsoft.Psi; - using Microsoft.Psi.Components; - - /// - /// Pipeline component that converts an shared Image to a different format. - /// - internal class ToPixelFormat : ConsumerProducer, Shared> - { - private readonly PixelFormat pixelFormat; - private System.Func> sharedImageAllocator; - - /// - /// Initializes a new instance of the class. - /// - /// The pipeline. - /// The pixel format to convert to. - /// Optional image allocator for creating new shared image. - internal ToPixelFormat(Pipeline pipeline, PixelFormat pixelFormat, System.Func> sharedImageAllocator = null) - : base(pipeline) - { - this.pixelFormat = pixelFormat; - sharedImageAllocator ??= (width, height, pixelFormat) => ImagePool.GetOrCreate(width, height, pixelFormat); - this.sharedImageAllocator = sharedImageAllocator; - } - - /// - /// Receiver for incoming image. - /// - /// The incoming image. - /// The message envelope for the incoming image. - protected override void Receive(Shared sharedImage, Envelope e) - { - // if it has the same format, shortcut the loop - if (this.pixelFormat == sharedImage.Resource.PixelFormat) - { - this.Out.Post(sharedImage, e.OriginatingTime); - } - else - { - using var image = this.sharedImageAllocator(sharedImage.Resource.Width, sharedImage.Resource.Height, this.pixelFormat); - sharedImage.Resource.CopyTo(image.Resource); - this.Out.Post(image, e.OriginatingTime); - } - } - } -} \ No newline at end of file diff --git a/Sources/Imaging/Test.Psi.Imaging.Windows/ImageTester.cs b/Sources/Imaging/Test.Psi.Imaging.Windows/ImageTester.cs index a29388170..e15ae8801 100644 --- a/Sources/Imaging/Test.Psi.Imaging.Windows/ImageTester.cs +++ b/Sources/Imaging/Test.Psi.Imaging.Windows/ImageTester.cs @@ -11,6 +11,7 @@ namespace Test.Psi.Imaging using Microsoft.Psi.Imaging; using Microsoft.Psi.Serialization; using Microsoft.VisualStudio.TestTools.UnitTesting; + using Test.Psi.Common; [TestClass] public class ImageTester @@ -262,9 +263,10 @@ public void Image_ReadBytes(PixelFormat pixelFormat) (100, 200, 255, 255, 255), }; + var bytesPerPixel = destImage.BitsPerPixel / 8; foreach (var (x, y, r, g, b) in expected) { - var bytes = destImage.ReadBytes(destImage.BitsPerPixel / 8, x * destImage.BitsPerPixel / 8 + y * destImage.Stride); + var bytes = destImage.ReadBytes(bytesPerPixel, x * bytesPerPixel + y * destImage.Stride); switch (pixelFormat) { case PixelFormat.BGR_24bpp: @@ -418,6 +420,75 @@ public void Image_CopyImage(PixelFormat srcFormat, PixelFormat dstFormat) } } + [TestMethod] + [Timeout(60000)] + [DataRow(PixelFormat.Gray_8bpp, PixelFormat.Gray_8bpp)] + [DataRow(PixelFormat.Gray_8bpp, PixelFormat.Gray_16bpp)] + [DataRow(PixelFormat.Gray_16bpp, PixelFormat.Gray_8bpp)] + [DataRow(PixelFormat.Gray_16bpp, PixelFormat.Gray_16bpp)] + + [DataRow(PixelFormat.Gray_8bpp, PixelFormat.BGR_24bpp)] + [DataRow(PixelFormat.Gray_8bpp, PixelFormat.BGRX_32bpp)] + [DataRow(PixelFormat.Gray_8bpp, PixelFormat.BGRA_32bpp)] + + [DataRow(PixelFormat.Gray_8bpp, PixelFormat.RGB_24bpp)] + [DataRow(PixelFormat.Gray_8bpp, PixelFormat.RGBA_64bpp)] + [DataRow(PixelFormat.Gray_16bpp, PixelFormat.BGR_24bpp)] + 
[DataRow(PixelFormat.Gray_16bpp, PixelFormat.BGRX_32bpp)] + [DataRow(PixelFormat.Gray_16bpp, PixelFormat.BGRA_32bpp)] + [DataRow(PixelFormat.Gray_16bpp, PixelFormat.RGB_24bpp)] + [DataRow(PixelFormat.Gray_16bpp, PixelFormat.RGBA_64bpp)] + + [DataRow(PixelFormat.BGR_24bpp, PixelFormat.Gray_8bpp)] + [DataRow(PixelFormat.BGR_24bpp, PixelFormat.Gray_16bpp)] + [DataRow(PixelFormat.BGRX_32bpp, PixelFormat.Gray_8bpp)] + [DataRow(PixelFormat.BGRX_32bpp, PixelFormat.Gray_16bpp)] + [DataRow(PixelFormat.BGRA_32bpp, PixelFormat.Gray_8bpp)] + [DataRow(PixelFormat.BGRA_32bpp, PixelFormat.Gray_16bpp)] + [DataRow(PixelFormat.RGB_24bpp, PixelFormat.Gray_8bpp)] + [DataRow(PixelFormat.RGB_24bpp, PixelFormat.Gray_16bpp)] + [DataRow(PixelFormat.RGBA_64bpp, PixelFormat.Gray_8bpp)] + [DataRow(PixelFormat.RGBA_64bpp, PixelFormat.Gray_16bpp)] + + [DataRow(PixelFormat.BGR_24bpp, PixelFormat.BGR_24bpp)] + [DataRow(PixelFormat.BGR_24bpp, PixelFormat.BGRX_32bpp)] + [DataRow(PixelFormat.BGR_24bpp, PixelFormat.BGRA_32bpp)] + [DataRow(PixelFormat.BGRX_32bpp, PixelFormat.BGR_24bpp)] + [DataRow(PixelFormat.BGRX_32bpp, PixelFormat.BGRX_32bpp)] + [DataRow(PixelFormat.BGRX_32bpp, PixelFormat.BGRA_32bpp)] + [DataRow(PixelFormat.BGRA_32bpp, PixelFormat.BGR_24bpp)] + [DataRow(PixelFormat.BGRA_32bpp, PixelFormat.BGRX_32bpp)] + [DataRow(PixelFormat.BGRA_32bpp, PixelFormat.BGRA_32bpp)] + + [DataRow(PixelFormat.RGB_24bpp, PixelFormat.RGB_24bpp)] + [DataRow(PixelFormat.RGB_24bpp, PixelFormat.BGR_24bpp)] + [DataRow(PixelFormat.RGB_24bpp, PixelFormat.BGRX_32bpp)] + [DataRow(PixelFormat.RGB_24bpp, PixelFormat.BGRA_32bpp)] + [DataRow(PixelFormat.RGBA_64bpp, PixelFormat.BGR_24bpp)] + [DataRow(PixelFormat.RGBA_64bpp, PixelFormat.BGRX_32bpp)] + [DataRow(PixelFormat.RGBA_64bpp, PixelFormat.BGRA_32bpp)] + [DataRow(PixelFormat.RGBA_64bpp, PixelFormat.RGB_24bpp)] + [DataRow(PixelFormat.BGR_24bpp, PixelFormat.RGB_24bpp)] + [DataRow(PixelFormat.BGRX_32bpp, PixelFormat.RGB_24bpp)] + [DataRow(PixelFormat.BGRA_32bpp, PixelFormat.RGB_24bpp)] + [DataRow(PixelFormat.BGR_24bpp, PixelFormat.RGBA_64bpp)] + [DataRow(PixelFormat.BGRX_32bpp, PixelFormat.RGBA_64bpp)] + [DataRow(PixelFormat.BGRA_32bpp, PixelFormat.RGBA_64bpp)] + [DataRow(PixelFormat.RGB_24bpp, PixelFormat.RGBA_64bpp)] + [DataRow(PixelFormat.RGBA_64bpp, PixelFormat.RGBA_64bpp)] + public void Image_ConvertViaOperator(PixelFormat srcFormat, PixelFormat dstFormat) + { + using var pipeline = Pipeline.Create("ConvertViaOperator"); + using var srcImage = ImagePool.GetOrCreate(this.testImage2.Width, this.testImage2.Height, srcFormat); + this.testImage2.CopyTo(srcImage.Resource); + Generators.Return(pipeline, srcImage).Convert(dstFormat).Do(dstImage => + { + using var refImage = srcImage.Resource.Convert(dstFormat); + this.AssertAreImagesEqual(refImage, dstImage.Resource); + }); + pipeline.Run(); + } + [TestMethod] [Timeout(60000)] [DataRow(PixelFormat.Gray_8bpp)] @@ -1050,6 +1121,60 @@ public void EncodeDepthImageAsTiff() this.AssertAreImagesEqual(testDepthImage, decodedDepthImage); } + [TestMethod] + [Timeout(60000)] + [DataRow(PixelFormat.BGRA_32bpp)] + [DataRow(PixelFormat.BGRX_32bpp)] + [DataRow(PixelFormat.BGR_24bpp)] + [DataRow(PixelFormat.RGB_24bpp)] + [DataRow(PixelFormat.RGBA_64bpp)] + [DataRow(PixelFormat.Gray_8bpp)] + [DataRow(PixelFormat.Gray_16bpp)] + public void Image_SaveAndLoad(PixelFormat pixelFormat) + { + string filename = $"TestImage_{pixelFormat}.bmp"; + + // Create a test image in the specified pixel format + using var sourceImage = this.testImage.Convert(pixelFormat); + + try + 
{ + if (pixelFormat == PixelFormat.Gray_16bpp || pixelFormat == PixelFormat.RGBA_64bpp) + { + // Gray_16bpp and RGBA_64bpp are not supported for file operations + Assert.ThrowsException(() => sourceImage.Save(filename)); + } + else + { + // Save the image to a file + sourceImage.Save(filename); + + // Load the image from file and compare + using (var testImage = Image.FromFile(filename)) + { + if (pixelFormat == PixelFormat.RGB_24bpp) + { + // RGB_24bpp images are converted to BGR_24bpp before saving + this.AssertAreImagesEqual(sourceImage.Convert(PixelFormat.BGR_24bpp), testImage); + } + else if (pixelFormat == PixelFormat.BGRX_32bpp) + { + // BGRX_32bpp images are converted to BGRA_32bpp before saving + this.AssertAreImagesEqual(sourceImage.Convert(PixelFormat.BGRA_32bpp), testImage); + } + else + { + this.AssertAreImagesEqual(sourceImage, testImage); + } + } + } + } + finally + { + TestRunner.SafeFileDelete(filename); + } + } + private void AssertAreImagesEqual(ImageBase referenceImage, ImageBase subjectImage, double tolerance = 6.0, double percentOutliersAllowed = 0.01) { ImageError err = new ImageError(); diff --git a/Sources/Imaging/Test.Psi.Imaging.Windows/Properties/AssemblyInfo.cs b/Sources/Imaging/Test.Psi.Imaging.Windows/Properties/AssemblyInfo.cs index 03417c630..1222c1262 100644 --- a/Sources/Imaging/Test.Psi.Imaging.Windows/Properties/AssemblyInfo.cs +++ b/Sources/Imaging/Test.Psi.Imaging.Windows/Properties/AssemblyInfo.cs @@ -10,6 +10,6 @@ [assembly: AssemblyCopyright("Copyright (c) Microsoft Corporation. All rights reserved.")] [assembly: ComVisible(false)] [assembly: Guid("191df615-3d8f-45a3-b763-dd4a604a712a")] -[assembly: AssemblyVersion("0.15.49.1")] -[assembly: AssemblyFileVersion("0.15.49.1")] -[assembly: AssemblyInformationalVersion("0.15.49.1-beta")] +[assembly: AssemblyVersion("0.16.92.1")] +[assembly: AssemblyFileVersion("0.16.92.1")] +[assembly: AssemblyInformationalVersion("0.16.92.1-beta")] diff --git a/Sources/Integrations/CognitiveServices/Microsoft.Psi.CognitiveServices.Face/Microsoft.Psi.CognitiveServices.Face.csproj b/Sources/Integrations/CognitiveServices/Microsoft.Psi.CognitiveServices.Face/Microsoft.Psi.CognitiveServices.Face.csproj index 7e8fc470f..77c2769bd 100644 --- a/Sources/Integrations/CognitiveServices/Microsoft.Psi.CognitiveServices.Face/Microsoft.Psi.CognitiveServices.Face.csproj +++ b/Sources/Integrations/CognitiveServices/Microsoft.Psi.CognitiveServices.Face/Microsoft.Psi.CognitiveServices.Face.csproj @@ -6,24 +6,24 @@ ../../../../Build/Microsoft.Psi.ruleset Provides components for using Microsoft's Cognitive Services Face API. 
- - bin\Debug\netstandard2.0\Microsoft.Psi.CognitiveServices.Face.xml - true - + + bin\Debug\netstandard2.0\Microsoft.Psi.CognitiveServices.Face.xml + true + - - bin\Debug\netstandard2.0\Microsoft.Psi.CognitiveServices.Face\Microsoft.Psi.CognitiveServices.Face.xml - true - + + bin\Debug\netstandard2.0\Microsoft.Psi.CognitiveServices.Face\Microsoft.Psi.CognitiveServices.Face.xml + true + - - all - runtime; build; native; contentfiles; analyzers + + all + runtime; build; native; contentfiles; analyzers @@ -31,8 +31,9 @@ runtime; build; native; contentfiles; analyzers - - - + + + + \ No newline at end of file diff --git a/Sources/Integrations/CognitiveServices/Microsoft.Psi.CognitiveServices.Face/PersonGroupTasks.cs b/Sources/Integrations/CognitiveServices/Microsoft.Psi.CognitiveServices.Face/PersonGroupTasks.cs index f1dcd12eb..4203d7b17 100644 --- a/Sources/Integrations/CognitiveServices/Microsoft.Psi.CognitiveServices.Face/PersonGroupTasks.cs +++ b/Sources/Integrations/CognitiveServices/Microsoft.Psi.CognitiveServices.Face/PersonGroupTasks.cs @@ -6,6 +6,7 @@ namespace Microsoft.Psi.CognitiveServices.Face using System; using System.Drawing; using System.IO; + using Microsoft.Psi.Data; using Microsoft.Psi.Imaging; /// diff --git a/Sources/Integrations/CognitiveServices/Test.Psi.CognitiveServices.Language/Test.Psi.CognitiveServices.Language.csproj b/Sources/Integrations/CognitiveServices/Test.Psi.CognitiveServices.Language/Test.Psi.CognitiveServices.Language.csproj index c48f69423..afba47bff 100644 --- a/Sources/Integrations/CognitiveServices/Test.Psi.CognitiveServices.Language/Test.Psi.CognitiveServices.Language.csproj +++ b/Sources/Integrations/CognitiveServices/Test.Psi.CognitiveServices.Language/Test.Psi.CognitiveServices.Language.csproj @@ -34,7 +34,7 @@ all runtime; build; native; contentfiles; analyzers - + diff --git a/Sources/Integrations/CognitiveServices/Test.Psi.CognitiveServices.Speech/Test.Psi.CognitiveServices.Speech.csproj b/Sources/Integrations/CognitiveServices/Test.Psi.CognitiveServices.Speech/Test.Psi.CognitiveServices.Speech.csproj index 3adb40897..e429b8a00 100644 --- a/Sources/Integrations/CognitiveServices/Test.Psi.CognitiveServices.Speech/Test.Psi.CognitiveServices.Speech.csproj +++ b/Sources/Integrations/CognitiveServices/Test.Psi.CognitiveServices.Speech/Test.Psi.CognitiveServices.Speech.csproj @@ -34,7 +34,7 @@ all runtime; build; native; contentfiles; analyzers - + diff --git a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/Microsoft.Psi.MicrosoftSpeech.Windows.csproj b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/Microsoft.Psi.MicrosoftSpeech.Windows.csproj index a795849aa..a95151d59 100644 --- a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/Microsoft.Psi.MicrosoftSpeech.Windows.csproj +++ b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/Microsoft.Psi.MicrosoftSpeech.Windows.csproj @@ -1,7 +1,7 @@  net472 - true + false Provides Windows-specific components for the using the Microsoft.Speech recognition platform. 
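The Microsoft.Speech hunks that follow mark MicrosoftSpeech, MicrosoftSpeechRecognizer, MicrosoftSpeechIntentDetector, and their configuration classes [Obsolete] and recommend the SystemSpeechRecognizer from Microsoft.Psi.Speech.Windows. A minimal migration sketch follows; the AudioCapture overload and the result's Text property are assumptions drawn from the public \psi API, not from this patch.

    // Migration sketch (assumptions, not from this patch): feed captured audio into the
    // SystemSpeechRecognizer that the deprecation notices below recommend.
    using System;
    using Microsoft.Psi;
    using Microsoft.Psi.Audio;
    using Microsoft.Psi.Speech;

    public static class SpeechMigrationSketch
    {
        public static void Run()
        {
            using var pipeline = Pipeline.Create();

            // capture 16 kHz, 16-bit mono PCM (assumed AudioCapture overload)
            var audio = new AudioCapture(pipeline, WaveFormat.Create16kHz1Channel16BitPcm());

            // replaces the deprecated MicrosoftSpeechRecognizer
            var recognizer = new SystemSpeechRecognizer(pipeline);
            audio.PipeTo(recognizer);

            // print recognized text (Text is assumed from the recognition result interface)
            recognizer.Do(result => Console.WriteLine(result.Text));

            pipeline.Run();
        }
    }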
Microsoft.Psi.MicrosoftSpeech diff --git a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeech.cs b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeech.cs index 9791c39cb..9aabf68e5 100644 --- a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeech.cs +++ b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeech.cs @@ -3,6 +3,7 @@ namespace Microsoft.Psi.MicrosoftSpeech { + using System; using System.Collections.Generic; using System.Globalization; using Microsoft.Psi.Language; @@ -11,6 +12,7 @@ namespace Microsoft.Psi.MicrosoftSpeech /// /// Static helper methods. /// + [Obsolete("The MicrosoftSpeechRecognizer component has been deprecated. Consider using the SystemSpeechRecognizer component available in Microsoft.Psi.Speech.Windows instead.", false)] public static class MicrosoftSpeech { /// diff --git a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechIntentDetector.cs b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechIntentDetector.cs index 035d8e326..6692d2696 100644 --- a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechIntentDetector.cs +++ b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechIntentDetector.cs @@ -22,6 +22,7 @@ namespace Microsoft.Psi.MicrosoftSpeech /// - Click here to download the Microsoft Speech Platform runtime. /// - Click here to download the Microsoft Speech Platform language pack. /// + [Obsolete("The MicrosoftSpeechRecognizer component has been deprecated. Consider using the SystemSpeechRecognizer component available in Microsoft.Psi.Speech.Windows instead.", false)] public sealed class MicrosoftSpeechIntentDetector : ConsumerProducer, ISourceComponent, IDisposable { /// @@ -61,7 +62,7 @@ public MicrosoftSpeechIntentDetector(Pipeline pipeline, MicrosoftSpeechIntentDet /// /// Initializes a new instance of the class. /// - /// The Psi pipeline. + /// The pipeline to add the component to. /// The name of the configuration file. public MicrosoftSpeechIntentDetector(Pipeline pipeline, string configurationFilename = null) : this( diff --git a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechIntentDetectorConfiguration.cs b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechIntentDetectorConfiguration.cs index 0debc395a..75c3cbc54 100644 --- a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechIntentDetectorConfiguration.cs +++ b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechIntentDetectorConfiguration.cs @@ -3,8 +3,8 @@ namespace Microsoft.Psi.MicrosoftSpeech { + using System; using System.Xml.Serialization; - using Microsoft.Psi.Audio; using Microsoft.Psi.Speech; /// @@ -14,6 +14,7 @@ namespace Microsoft.Psi.MicrosoftSpeech /// Use this class to configure a new instance of the component. /// Refer to the properties in this class for more information on the various configuration options. /// + [Obsolete("The MicrosoftSpeechRecognizer component has been deprecated. 
Consider using the SystemSpeechRecognizer component available in Microsoft.Psi.Speech.Windows instead.", false)] public sealed class MicrosoftSpeechIntentDetectorConfiguration { /// diff --git a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechRecognizer.cs b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechRecognizer.cs index 4deae4d2c..b58c6346a 100644 --- a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechRecognizer.cs +++ b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechRecognizer.cs @@ -31,6 +31,7 @@ namespace Microsoft.Psi.MicrosoftSpeech /// position offset of the recognized audio as reported by the recognition engine to compute an estimate of the originating time. For /// partial hypotheses, we use the engine's current offset into the audio stream to estimate the originating time. /// + [Obsolete("The MicrosoftSpeechRecognizer component has been deprecated. Consider using the SystemSpeechRecognizer component available in Microsoft.Psi.Speech.Windows instead.", false)] public sealed class MicrosoftSpeechRecognizer : ConsumerProducer, ISourceComponent, IDisposable { /// @@ -122,7 +123,7 @@ public MicrosoftSpeechRecognizer(Pipeline pipeline, MicrosoftSpeechRecognizerCon /// /// Initializes a new instance of the class. /// - /// The Psi pipeline. + /// The pipeline to add the component to. /// The name of the configuration file. public MicrosoftSpeechRecognizer(Pipeline pipeline, string configurationFilename = null) : this( @@ -525,7 +526,7 @@ private void OnLoadGrammarCompleted(object sender, LoadGrammarCompletedEventArgs /// originating times. /// /// The type of the output stream. - /// The pipeline in which this component was created. + /// The pipeline to add the component to. /// The name of the stream. /// The group in which to create the stream. /// The newly created emitter for the stream. diff --git a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechRecognizerConfiguration.cs b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechRecognizerConfiguration.cs index 78cfbcfd2..44754bbe3 100644 --- a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechRecognizerConfiguration.cs +++ b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/MicrosoftSpeechRecognizerConfiguration.cs @@ -3,6 +3,7 @@ namespace Microsoft.Psi.MicrosoftSpeech { + using System; using System.Xml.Serialization; using Microsoft.Psi.Audio; using Microsoft.Psi.Speech; @@ -14,6 +15,7 @@ namespace Microsoft.Psi.MicrosoftSpeech /// Use this class to configure a new instance of the component. /// Refer to the properties in this class for more information on the various configuration options. /// + [Obsolete("The MicrosoftSpeechRecognizer component has been deprecated. 
Consider using the SystemSpeechRecognizer component available in Microsoft.Psi.Speech.Windows instead.", false)] public sealed class MicrosoftSpeechRecognizerConfiguration { /// diff --git a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/README.md b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/README.md index 913443272..4596fc448 100644 --- a/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/README.md +++ b/Sources/Integrations/MicrosoftSpeech/Microsoft.Psi.MicrosoftSpeech.Windows/README.md @@ -1,5 +1,7 @@ # Microsoft Speech Component +**[NOTE: This component has been deprecated as of \\psi release 0.16. Existing users should migrate to the SystemSpeechRecognizer component available in the Microsoft.Psi.Speech.Windows project.]** + This project builds the integration component for speech recognition based on the Microsoft Speech Platform. In order to build this project, the [Microsoft Speech Platform SDK v11.0](http://go.microsoft.com/fwlink/?LinkID=223570) must be installed on your machine. Note that only the 64-bit version of the SDK is currently supported. Additionally, you will need to set an environment variable named `MsSpeechSdkDir` that points to the location in which you installed the SDK. The path should be the root of the SDK folder which contains the Assembly directory. By default, this is `C:\Program Files\Microsoft SDKs\Speech\v11.0` diff --git a/Sources/Integrations/Onnx/Common/ModelRunners/ImageNet/ImageNetModelOutputParser.cs b/Sources/Integrations/Onnx/Common/ModelRunners/ImageNet/ImageNetModelOutputParser.cs index d1924da27..e31a219e4 100644 --- a/Sources/Integrations/Onnx/Common/ModelRunners/ImageNet/ImageNetModelOutputParser.cs +++ b/Sources/Integrations/Onnx/Common/ModelRunners/ImageNet/ImageNetModelOutputParser.cs @@ -15,51 +15,63 @@ namespace Microsoft.Psi.Onnx /// public class ImageNetModelOutputParser { + /// + /// Gets the classes count. + /// + public static readonly int ClassesCount = 1000; + private readonly string[] labels; - private readonly int maxPredictions; private readonly bool applySoftmax; /// /// Initializes a new instance of the class. /// /// The path to the file containing the list of 1000 ImageNet classes. - /// The maximum number of predictions to return. /// Whether the softmax function should be applied to the raw model output. /// /// The file referenced by may be downloaded from the following location: /// https://github.com/onnx/models/raw/8d50e3f598e6d5c67c7c7253e5a203a26e731a1b/vision/classification/synset.txt. /// - public ImageNetModelOutputParser(string imageClassesFile, int maxPredictions, bool applySoftmax) + public ImageNetModelOutputParser(string imageClassesFile, bool applySoftmax) { this.labels = File.ReadAllLines(imageClassesFile); - if (this.labels.Length != 1000) + if (this.labels.Length != ClassesCount) { throw new ArgumentException($"The file {imageClassesFile} does not appear to be in the correct format. This file should contain exactly 1000 lines representing an ordered list of the 1000 ImageNet classes."); } - this.maxPredictions = maxPredictions; this.applySoftmax = applySoftmax; } /// - /// Gets the predictions from the model output. + /// Gets the list of predictions from the model output. + /// + /// The model output vector of class probabilities. + /// The unsorted list of predictions. + public List GetLabeledPredictions(float[] modelOutput) + => GetResults(this.applySoftmax ? 
Softmax(modelOutput) : modelOutput) + .Select(c => new LabeledPrediction { Label = this.labels[c.Index], Confidence = c.Value }) + .ToList(); + + /// + /// Gets the top-N predictions from the model output. /// /// The model output vector of class probabilities. + /// The number of top-predictions to return. /// A list of the top-N predictions, in descending probability order. - public List GetPredictions(float[] modelOutput) - { - return GetTopResults(this.applySoftmax ? Softmax(modelOutput) : modelOutput, this.maxPredictions) + public List GetTopNLabeledPredictions(float[] modelOutput, int count) + => GetTopNResults(this.applySoftmax ? Softmax(modelOutput) : modelOutput, count) .Select(c => new LabeledPrediction { Label = this.labels[c.Index], Confidence = c.Value }) .ToList(); - } - private static IEnumerable<(int Index, float Value)> GetTopResults(IEnumerable predictedClasses, int count) - { - return predictedClasses + private static IEnumerable<(int Index, float Value)> GetResults(IEnumerable predictedClasses) + => predictedClasses.Select((predictedClass, index) => (Index: index, Value: predictedClass)); + + private static IEnumerable<(int Index, float Value)> GetTopNResults(IEnumerable predictedClasses, int count) + => predictedClasses .Select((predictedClass, index) => (Index: index, Value: predictedClass)) .OrderByDescending(result => result.Value) .Take(count); - } private static IEnumerable Softmax(IEnumerable values) { diff --git a/Sources/Integrations/Onnx/Common/ModelRunners/ImageNet/ImageNetModelRunner.cs b/Sources/Integrations/Onnx/Common/ModelRunners/ImageNet/ImageNetModelRunner.cs index fc19c3c6c..c550fc70b 100644 --- a/Sources/Integrations/Onnx/Common/ModelRunners/ImageNet/ImageNetModelRunner.cs +++ b/Sources/Integrations/Onnx/Common/ModelRunners/ImageNet/ImageNetModelRunner.cs @@ -29,6 +29,7 @@ namespace Microsoft.Psi.Onnx /// public class ImageNetModelRunner : ConsumerProducer, List> { + private readonly ImageNetModelRunnerConfiguration configuration; private readonly float[] onnxInputVector = new float[3 * 224 * 224]; private readonly OnnxModel onnxModel; private readonly ImageNetModelOutputParser outputParser; @@ -45,6 +46,8 @@ public class ImageNetModelRunner : ConsumerProducer, List @@ -76,7 +79,7 @@ protected override void Receive(Shared data, Envelope envelope) var outputVector = this.onnxModel.GetPrediction(this.onnxInputVector); // parse the model output into an ordered list of the top-N predictions - var results = this.outputParser.GetPredictions(outputVector); + var results = this.outputParser.GetTopNLabeledPredictions(outputVector, this.configuration.NumberOfPredictions); // post the results this.Out.Post(results, envelope.OriginatingTime); diff --git a/Sources/Integrations/Onnx/Microsoft.Psi.Onnx.Cpu/Microsoft.Psi.Onnx.Cpu.csproj b/Sources/Integrations/Onnx/Microsoft.Psi.Onnx.Cpu/Microsoft.Psi.Onnx.Cpu.csproj index 5887f1da0..1c955c94e 100644 --- a/Sources/Integrations/Onnx/Microsoft.Psi.Onnx.Cpu/Microsoft.Psi.Onnx.Cpu.csproj +++ b/Sources/Integrations/Onnx/Microsoft.Psi.Onnx.Cpu/Microsoft.Psi.Onnx.Cpu.csproj @@ -33,7 +33,7 @@ runtime; build; native; contentfiles; analyzers; buildtransitive - + diff --git a/Sources/Integrations/Onnx/Microsoft.Psi.Onnx.Gpu/Microsoft.Psi.Onnx.Gpu.csproj b/Sources/Integrations/Onnx/Microsoft.Psi.Onnx.Gpu/Microsoft.Psi.Onnx.Gpu.csproj index 12671ecd6..8be2df523 100644 --- a/Sources/Integrations/Onnx/Microsoft.Psi.Onnx.Gpu/Microsoft.Psi.Onnx.Gpu.csproj +++ 
b/Sources/Integrations/Onnx/Microsoft.Psi.Onnx.Gpu/Microsoft.Psi.Onnx.Gpu.csproj @@ -32,8 +32,8 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive - - + + diff --git a/Sources/Integrations/Onnx/Test.Psi.Onnx/Test.Psi.Onnx.csproj b/Sources/Integrations/Onnx/Test.Psi.Onnx/Test.Psi.Onnx.csproj index cfb8353db..bbec1ec63 100644 --- a/Sources/Integrations/Onnx/Test.Psi.Onnx/Test.Psi.Onnx.csproj +++ b/Sources/Integrations/Onnx/Test.Psi.Onnx/Test.Psi.Onnx.csproj @@ -19,7 +19,7 @@ - + diff --git a/Sources/Integrations/ROS/Microsoft.Psi.ROS/Microsoft.Psi.ROS.fsproj b/Sources/Integrations/ROS/Microsoft.Psi.ROS/Microsoft.Psi.ROS.fsproj index 0d016237c..d892e1d17 100644 --- a/Sources/Integrations/ROS/Microsoft.Psi.ROS/Microsoft.Psi.ROS.fsproj +++ b/Sources/Integrations/ROS/Microsoft.Psi.ROS/Microsoft.Psi.ROS.fsproj @@ -2,7 +2,8 @@ netstandard2.0 - true + Provides APIs for using ROS from .NET. + true diff --git a/Sources/Kinect/Microsoft.Psi.AzureKinect.Visualization/AzureKinectBodyListVisualizationObject.cs b/Sources/Kinect/Microsoft.Psi.AzureKinect.Visualization/AzureKinectBodyListVisualizationObject.cs index dcc1c38a1..861b00188 100644 --- a/Sources/Kinect/Microsoft.Psi.AzureKinect.Visualization/AzureKinectBodyListVisualizationObject.cs +++ b/Sources/Kinect/Microsoft.Psi.AzureKinect.Visualization/AzureKinectBodyListVisualizationObject.cs @@ -11,7 +11,7 @@ namespace Microsoft.Psi.AzureKinect.Visualization /// Implements a visualization object for a list of Azure Kinect bodies. /// [VisualizationObject("Azure Kinect Bodies")] - public class AzureKinectBodyListVisualizationObject : ModelVisual3DVisualizationObjectEnumerable> + public class AzureKinectBodyListVisualizationObject : ModelVisual3DListVisualizationObject { } } \ No newline at end of file diff --git a/Sources/Kinect/Microsoft.Psi.AzureKinect.Visualization/AzureKinectBodyVisualizationObject.cs b/Sources/Kinect/Microsoft.Psi.AzureKinect.Visualization/AzureKinectBodyVisualizationObject.cs index a260a3625..172e6cfaa 100644 --- a/Sources/Kinect/Microsoft.Psi.AzureKinect.Visualization/AzureKinectBodyVisualizationObject.cs +++ b/Sources/Kinect/Microsoft.Psi.AzureKinect.Visualization/AzureKinectBodyVisualizationObject.cs @@ -4,13 +4,15 @@ namespace Microsoft.Psi.AzureKinect.Visualization { using System; + using System.Collections.Generic; using System.ComponentModel; + using System.Linq; using System.Runtime.Serialization; - using System.Windows; using System.Windows.Media; - using HelixToolkit.Wpf; + using MathNet.Spatial.Euclidean; using Microsoft.Azure.Kinect.BodyTracking; using Microsoft.Psi.AzureKinect; + using Microsoft.Psi.Visualization.DataTypes; using Microsoft.Psi.Visualization.VisualizationObjects; using Xceed.Wpf.Toolkit.PropertyGrid.Attributes; using Win3D = System.Windows.Media.Media3D; @@ -21,15 +23,8 @@ namespace Microsoft.Psi.AzureKinect.Visualization [VisualizationObject("Azure Kinect Body")] public class AzureKinectBodyVisualizationObject : ModelVisual3DVisualizationObject { - private readonly UpdatableVisual3DDictionary visualJoints; - private readonly UpdatableVisual3DDictionary<(JointId ChildJoint, JointId ParentJoint), PipeVisual3D> visualBones; + private static readonly Dictionary<(JointId ChildJoint, JointId ParentJoint), bool> AzureKinectBodyGraph = AzureKinectBody.Bones.ToDictionary(j => j, j => true); - private Color color = Colors.White; - private double inferredJointsOpacity = 30; - private double boneDiameterMm = 20; - private double jointRadiusMm = 15; - private bool showBillboard = false; - 
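An aside on the ImageNetModelOutputParser changes a few hunks above: parsing is now split into GetLabeledPredictions (all ClassesCount classes, unsorted) and GetTopNLabeledPredictions (top-N by probability), with softmax applied optionally. A standalone sketch of that selection, assuming a hypothetical labels array and a standard numerically stable softmax (the patch does not show the Softmax body):

    // Sketch (not from this patch) of top-N selection with optional softmax, as in
    // ImageNetModelOutputParser.GetTopNLabeledPredictions.
    using System;
    using System.Linq;

    public static class TopNSketch
    {
        public static (string Label, float Confidence)[] TopN(float[] modelOutput, string[] labels, int count, bool applySoftmax)
        {
            var scores = applySoftmax ? Softmax(modelOutput) : modelOutput;
            return scores
                .Select((value, index) => (Label: labels[index], Confidence: value))
                .OrderByDescending(p => p.Confidence)
                .Take(count)
                .ToArray();
        }

        private static float[] Softmax(float[] values)
        {
            // subtract the max for numerical stability, then normalize the exponentials
            var max = values.Max();
            var exps = values.Select(v => (float)Math.Exp(v - max)).ToArray();
            var sum = exps.Sum();
            return exps.Select(e => e / sum).ToArray();
        }
    }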
private int polygonResolution = 6; private double billboardHeightCm = 100; /// @@ -37,73 +32,64 @@ public class AzureKinectBodyVisualizationObject : ModelVisual3DVisualizationObje /// public AzureKinectBodyVisualizationObject() { - this.visualJoints = new UpdatableVisual3DDictionary(null); - this.visualBones = new UpdatableVisual3DDictionary<(JointId ChildJoint, JointId ParentJoint), PipeVisual3D>(null); - - this.Billboard = new BillboardTextVisualizationObject(); + this.Skeleton = new SkeletonVisualizationObject( + nodeVisibilityFunc: + jointType => + { + var jointState = this.CurrentData.Joints[jointType].Confidence; + var isTracked = jointState == JointConfidenceLevel.High || jointState == JointConfidenceLevel.Medium; + return jointState != JointConfidenceLevel.None && (isTracked || this.Skeleton.InferredJointsOpacity > 0); + }, + nodeFillFunc: + jointType => + { + var jointState = this.CurrentData.Joints[jointType].Confidence; + var isTracked = jointState == JointConfidenceLevel.High || jointState == JointConfidenceLevel.Medium; + return isTracked ? + new SolidColorBrush(this.Skeleton.NodeColor) : + new SolidColorBrush( + Color.FromArgb( + (byte)(Math.Max(0, Math.Min(100, this.Skeleton.InferredJointsOpacity)) * 2.55), + this.Skeleton.NodeColor.R, + this.Skeleton.NodeColor.G, + this.Skeleton.NodeColor.B)); + }, + edgeVisibilityFunc: + bone => + { + var parentState = this.CurrentData.Joints[bone.Item1].Confidence; + var childState = this.CurrentData.Joints[bone.Item2].Confidence; + var parentIsTracked = parentState == JointConfidenceLevel.High || parentState == JointConfidenceLevel.Medium; + var childIsTracked = childState == JointConfidenceLevel.High || childState == JointConfidenceLevel.Medium; + var isTracked = parentIsTracked && childIsTracked; + return parentState != JointConfidenceLevel.None && childState != JointConfidenceLevel.None && (isTracked || this.Skeleton.InferredJointsOpacity > 0); + }, + edgeFillFunc: + bone => + { + var parentState = this.CurrentData.Joints[bone.Item1].Confidence; + var childState = this.CurrentData.Joints[bone.Item2].Confidence; + var parentIsTracked = parentState == JointConfidenceLevel.High || parentState == JointConfidenceLevel.Medium; + var childIsTracked = childState == JointConfidenceLevel.High || childState == JointConfidenceLevel.Medium; + var isTracked = parentIsTracked && childIsTracked; + return isTracked ? + new SolidColorBrush(this.Skeleton.NodeColor) : + new SolidColorBrush( + Color.FromArgb( + (byte)(Math.Max(0, Math.Min(100, this.Skeleton.InferredJointsOpacity)) * 2.55), + this.Skeleton.NodeColor.R, + this.Skeleton.NodeColor.G, + this.Skeleton.NodeColor.B)); + }); + + this.Skeleton.RegisterChildPropertyChangedNotifications(this, nameof(this.Skeleton)); + + this.Billboard = new BillboardTextVisualizationObject() { Visible = false }; this.Billboard.RegisterChildPropertyChangedNotifications(this, nameof(this.Billboard)); this.UpdateVisibility(); } - /// - /// Gets or sets the color. - /// - [DataMember] - [Description("Color of the body.")] - public Color Color - { - get { return this.color; } - set { this.Set(nameof(this.Color), ref this.color, value); } - } - - /// - /// Gets or sets the inferred joints opacity. - /// - [DataMember] - [Description("Opacity for rendering inferred joints and bones.")] - public double InferredJointsOpacity - { - get { return this.inferredJointsOpacity; } - set { this.Set(nameof(this.InferredJointsOpacity), ref this.inferredJointsOpacity, value); } - } - - /// - /// Gets or sets the bone diameter. 
- /// - [DataMember] - [DisplayName("Bone diameter (mm)")] - [Description("Diameter of bones (mm).")] - public double BoneDiameterMm - { - get { return this.boneDiameterMm; } - set { this.Set(nameof(this.BoneDiameterMm), ref this.boneDiameterMm, value); } - } - - /// - /// Gets or sets the joint diameter. - /// - [DataMember] - [DisplayName("Joint radius (mm)")] - [Description("Radius of joints (mm).")] - public double JointRadiusMm - { - get { return this.jointRadiusMm; } - set { this.Set(nameof(this.JointRadiusMm), ref this.jointRadiusMm, value); } - } - - /// - /// Gets or sets a value indicating whether to show a billboard with information about the body. - /// - [DataMember] - [PropertyOrder(0)] - [Description("Show a billboard with information about the body.")] - public bool ShowBillboard - { - get { return this.showBillboard; } - set { this.Set(nameof(this.ShowBillboard), ref this.showBillboard, value); } - } - /// /// Gets or sets the height at which to draw the billboard (cm). /// @@ -118,25 +104,24 @@ public double BillboardHeightCm } /// - /// Gets the billboard visualization object for the spatial entity. + /// Gets the billboard visualization object for the body. /// [ExpandableObject] [DataMember] [PropertyOrder(2)] - [DisplayName("Billboard Properties")] + [DisplayName("Billboard")] [Description("The billboard properties.")] public BillboardTextVisualizationObject Billboard { get; private set; } /// - /// Gets or sets the number of divisions to use when rendering polygons for joints and bones. + /// Gets the skeleton visualization object for the body. /// + [ExpandableObject] [DataMember] - [Description("Level of resolution at which to render joint and bone polygons (minimum value is 3).")] - public int PolygonResolution - { - get { return this.polygonResolution; } - set { this.Set(nameof(this.PolygonResolution), ref this.polygonResolution, value < 3 ? 
3 : value); } - } + [PropertyOrder(3)] + [DisplayName("Skeleton")] + [Description("The body's skeleton properties.")] + public SkeletonVisualizationObject Skeleton { get; private set; } /// public override void UpdateData() @@ -152,19 +137,7 @@ public override void UpdateData() /// public override void NotifyPropertyChanged(string propertyName) { - if (propertyName == nameof(this.Color) || - propertyName == nameof(this.InferredJointsOpacity) || - propertyName == nameof(this.BoneDiameterMm) || - propertyName == nameof(this.JointRadiusMm) || - propertyName == nameof(this.PolygonResolution)) - { - this.UpdateVisuals(); - } - else if (propertyName == nameof(this.ShowBillboard)) - { - this.UpdateBillboardVisibility(); - } - else if (propertyName == nameof(this.BillboardHeightCm)) + if (propertyName == nameof(this.BillboardHeightCm)) { this.UpdateBillboard(); } @@ -176,106 +149,18 @@ public override void NotifyPropertyChanged(string propertyName) private void UpdateVisuals() { - this.visualJoints.BeginUpdate(); - this.visualBones.BeginUpdate(); + this.UpdateSkeleton(); + this.UpdateBillboard(); + } + private void UpdateSkeleton() + { if (this.CurrentData != null) { - var trackedEntitiesBrush = new SolidColorBrush(this.Color); - var untrackedEntitiesBrush = new SolidColorBrush( - Color.FromArgb( - (byte)(Math.Max(0, Math.Min(100, this.InferredJointsOpacity)) * 2.55), - this.Color.R, - this.Color.G, - this.Color.B)); - - // update the joints - foreach (var jointType in this.CurrentData.Joints.Keys) - { - var jointState = this.CurrentData.Joints[jointType].Confidence; - var visualJoint = this.visualJoints[jointType]; - visualJoint.BeginEdit(); - var isTracked = jointState == JointConfidenceLevel.High || jointState == JointConfidenceLevel.Medium; - var visible = jointState != JointConfidenceLevel.None && (isTracked || this.InferredJointsOpacity > 0); - - if (visible) - { - var jointPosition = this.CurrentData.Joints[jointType].Pose.Origin; - - if (visualJoint.Radius != this.JointRadiusMm / 1000.0) - { - visualJoint.Radius = this.JointRadiusMm / 1000.0; - } - - var fill = isTracked ? 
trackedEntitiesBrush : untrackedEntitiesBrush; - if (visualJoint.Fill != fill) - { - visualJoint.Fill = fill; - } - - visualJoint.Transform = new Win3D.TranslateTransform3D(jointPosition.X, jointPosition.Y, jointPosition.Z); - - visualJoint.PhiDiv = this.PolygonResolution; - visualJoint.ThetaDiv = this.PolygonResolution; - - visualJoint.Visible = true; - } - else - { - visualJoint.Visible = false; - } - - visualJoint.EndEdit(); - } - - // update the bones - foreach (var bone in AzureKinectBody.Bones) - { - var parentState = this.CurrentData.Joints[bone.ParentJoint].Confidence; - var childState = this.CurrentData.Joints[bone.ChildJoint].Confidence; - var parentIsTracked = parentState == JointConfidenceLevel.High || parentState == JointConfidenceLevel.Medium; - var childIsTracked = childState == JointConfidenceLevel.High || childState == JointConfidenceLevel.Medium; - var isTracked = parentIsTracked && childIsTracked; - var visible = parentState != JointConfidenceLevel.None && childState != JointConfidenceLevel.None && (isTracked || this.InferredJointsOpacity > 0); - var visualBone = this.visualBones[bone]; - visualBone.BeginEdit(); - if (visible) - { - if (visualBone.Diameter != this.BoneDiameterMm / 1000.0) - { - visualBone.Diameter = this.BoneDiameterMm / 1000.0; - } - - var joint1Position = this.visualJoints[bone.ParentJoint].Transform.Value; - var joint2Position = this.visualJoints[bone.ChildJoint].Transform.Value; - - visualBone.Point1 = new Win3D.Point3D(joint1Position.OffsetX, joint1Position.OffsetY, joint1Position.OffsetZ); - visualBone.Point2 = new Win3D.Point3D(joint2Position.OffsetX, joint2Position.OffsetY, joint2Position.OffsetZ); - - var fill = isTracked ? trackedEntitiesBrush : untrackedEntitiesBrush; - if (visualBone.Fill != fill) - { - visualBone.Fill = fill; - } - - visualBone.ThetaDiv = this.PolygonResolution; - - visualBone.Visible = true; - } - else - { - visualBone.Visible = false; - } - - visualBone.EndEdit(); - } - - // set billboard position - this.UpdateBillboard(); + var points = this.CurrentData.Joints.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Pose.Origin); + var graph = new Graph(points, AzureKinectBodyGraph); + this.Skeleton.SetCurrentValue(this.SynthesizeMessage(graph)); } - - this.visualJoints.EndUpdate(); - this.visualBones.EndUpdate(); } private void UpdateBillboard() @@ -291,14 +176,10 @@ private void UpdateBillboard() private void UpdateVisibility() { - this.UpdateChildVisibility(this.visualJoints, this.Visible && this.CurrentData != default); - this.UpdateChildVisibility(this.visualBones, this.Visible && this.CurrentData != default); - this.UpdateBillboardVisibility(); - } + bool childrenVisible = this.Visible && this.CurrentData != default; - private void UpdateBillboardVisibility() - { - this.UpdateChildVisibility(this.Billboard.ModelView, this.Visible && this.CurrentData != default && this.ShowBillboard); + this.UpdateChildVisibility(this.Skeleton.ModelView, childrenVisible); + this.UpdateChildVisibility(this.Billboard.ModelView, childrenVisible); } } } \ No newline at end of file diff --git a/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectBodyTracker.cs b/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectBodyTracker.cs index 9f7944be3..5da2b380c 100644 --- a/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectBodyTracker.cs +++ b/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectBodyTracker.cs @@ -19,11 +19,11 @@ namespace Microsoft.Psi.AzureKinect /// before arriving at the tracker. Unencoded or non-lossy (e.g. 
PNG) encoding are okay. public sealed class AzureKinectBodyTracker : ConsumerProducer<(Shared Depth, Shared IR), List>, IDisposable { - private static readonly object TrackerCreationLock = new object(); + private static readonly object TrackerCreationLock = new (); private readonly AzureKinectBodyTrackerConfiguration configuration; - private readonly List currentBodies = new List(); - private readonly Capture capture = new Capture(); + private readonly List currentBodies = new (); + private readonly Capture capture = new (); private Tracker tracker = null; private byte[] depthBytes = null; @@ -32,7 +32,7 @@ public sealed class AzureKinectBodyTracker : ConsumerProducer<(Shared /// Initializes a new instance of the class. /// - /// Pipeline to add this component to. + /// The pipeline to add the component to. /// An optional configuration to use for the body tracker. public AzureKinectBodyTracker(Pipeline pipeline, AzureKinectBodyTrackerConfiguration configuration = null) : base(pipeline) diff --git a/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectCore.cs b/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectCore.cs index 76c763f72..574ee6985 100644 --- a/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectCore.cs +++ b/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectCore.cs @@ -4,18 +4,15 @@ namespace Microsoft.Psi.AzureKinect { using System; - using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics; using System.Threading; - using System.Threading.Tasks; using MathNet.Numerics.LinearAlgebra; using MathNet.Spatial.Euclidean; using Microsoft.Azure.Kinect.Sensor; using Microsoft.Psi; using Microsoft.Psi.Calibration; using Microsoft.Psi.Components; - using Microsoft.Psi.DeviceManagement; using Microsoft.Psi.Imaging; using Image = Microsoft.Psi.Imaging.Image; diff --git a/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectSensor.cs b/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectSensor.cs index 708759571..35e8b1418 100644 --- a/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectSensor.cs +++ b/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectSensor.cs @@ -22,7 +22,7 @@ public class AzureKinectSensor : Subpipeline /// /// Initializes a new instance of the class. /// - /// Pipeline to add this component to. + /// The pipeline to add the component to. /// Configuration to use for the sensor. /// An optional default delivery policy for the subpipeline (defaults is LatestMessage). /// An optional delivery policy for sending the depth-and-IR images stream to the body tracker (default is LatestMessage). 
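A worked restatement of the inferred-joint opacity arithmetic used in the skeleton visualization hunks above: InferredJointsOpacity is a 0-100 percentage that is clamped and scaled by 2.55 to a 0-255 alpha byte applied to the node color. A small sketch (not from this patch):

    // Sketch: map a 0-100 inferred-joints opacity percentage to a 0-255 alpha byte,
    // as the visualization code above does.
    using System;

    public static class InferredJointAlphaSketch
    {
        public static byte ToAlpha(double inferredJointsOpacity) =>
            (byte)(Math.Max(0, Math.Min(100, inferredJointsOpacity)) * 2.55);

        // e.g. the default of 30 maps to alpha 76; 0 hides inferred joints entirely; 100 is fully opaque (255)
    }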
diff --git a/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/Microsoft.Psi.AzureKinect.x64.csproj b/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/Microsoft.Psi.AzureKinect.x64.csproj index 8f4d75a9f..fb688c4dd 100644 --- a/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/Microsoft.Psi.AzureKinect.x64.csproj +++ b/Sources/Kinect/Microsoft.Psi.AzureKinect.x64/Microsoft.Psi.AzureKinect.x64.csproj @@ -30,7 +30,7 @@ - + all runtime; build; native; contentfiles; analyzers; buildtransitive diff --git a/Sources/Kinect/Microsoft.Psi.Kinect.Face.Windows.x64/KinectFaceDetector.cs b/Sources/Kinect/Microsoft.Psi.Kinect.Face.Windows.x64/KinectFaceDetector.cs index cf924467d..380e63575 100644 --- a/Sources/Kinect/Microsoft.Psi.Kinect.Face.Windows.x64/KinectFaceDetector.cs +++ b/Sources/Kinect/Microsoft.Psi.Kinect.Face.Windows.x64/KinectFaceDetector.cs @@ -30,7 +30,7 @@ public class KinectFaceDetector : IKinectFaceDetector, ISourceComponent, IDispos /// /// Initializes a new instance of the class. /// - /// Pipeline this sensor is a part of. + /// The pipeline to add the component to. /// Psi Kinect device from which we get our associated bodies. /// Configuration to use. public KinectFaceDetector(Pipeline pipeline, KinectSensor kinectSensor, KinectFaceDetectorConfiguration configuration = null) @@ -203,7 +203,7 @@ internal class KinectBodyReceiver : IConsumer> /// Initializes a new instance of the class. /// Defines an internal receiver for receiving the KinectBody from our associated Kinect sensor. /// - /// Pipeline sensor is running in. + /// The pipeline to add the component to. /// Our parent face detector. public KinectBodyReceiver(Pipeline pipeline, KinectFaceDetector faceDetector) { diff --git a/Sources/Kinect/Microsoft.Psi.Kinect.Visualization.Windows/KinectBodyListVisualizationObject.cs b/Sources/Kinect/Microsoft.Psi.Kinect.Visualization.Windows/KinectBodyListVisualizationObject.cs index ef7fa087f..33c1ee6cf 100644 --- a/Sources/Kinect/Microsoft.Psi.Kinect.Visualization.Windows/KinectBodyListVisualizationObject.cs +++ b/Sources/Kinect/Microsoft.Psi.Kinect.Visualization.Windows/KinectBodyListVisualizationObject.cs @@ -3,7 +3,6 @@ namespace Microsoft.Psi.Kinect.Visualization { - using System.Collections.Generic; using Microsoft.Psi.Kinect; using Microsoft.Psi.Visualization.VisualizationObjects; @@ -11,7 +10,7 @@ namespace Microsoft.Psi.Kinect.Visualization /// Represents a visualization object for Azure Kinect bodies. /// [VisualizationObject("Kinect Bodies")] - public class KinectBodyListVisualizationObject : ModelVisual3DVisualizationObjectEnumerable> + public class KinectBodyListVisualizationObject : ModelVisual3DListVisualizationObject { } } \ No newline at end of file diff --git a/Sources/Kinect/Microsoft.Psi.Kinect.Visualization.Windows/KinectBodyVisualizationObject.cs b/Sources/Kinect/Microsoft.Psi.Kinect.Visualization.Windows/KinectBodyVisualizationObject.cs index 3d56cf769..7d3f778fc 100644 --- a/Sources/Kinect/Microsoft.Psi.Kinect.Visualization.Windows/KinectBodyVisualizationObject.cs +++ b/Sources/Kinect/Microsoft.Psi.Kinect.Visualization.Windows/KinectBodyVisualizationObject.cs @@ -118,7 +118,7 @@ public double BillboardHeightCm } /// - /// Gets the billboard visualization object for the spatial entity. + /// Gets the billboard visualization object for the body. 
/// [ExpandableObject] [DataMember] diff --git a/Sources/Kinect/Microsoft.Psi.Kinect.Windows/KinectInternalCalibration.cs b/Sources/Kinect/Microsoft.Psi.Kinect.Windows/KinectInternalCalibration.cs index 8fd5bdac0..92ce0fd61 100644 --- a/Sources/Kinect/Microsoft.Psi.Kinect.Windows/KinectInternalCalibration.cs +++ b/Sources/Kinect/Microsoft.Psi.Kinect.Windows/KinectInternalCalibration.cs @@ -9,6 +9,7 @@ namespace Microsoft.Psi.Kinect using System.Collections.Generic; using System.Xml.Serialization; using MathNet.Numerics.LinearAlgebra; + using MathNet.Spatial.Euclidean; using Microsoft.Kinect; using Microsoft.Psi.Calibration; using static Microsoft.Psi.Calibration.CalibrationExtensions; @@ -20,11 +21,11 @@ internal class KinectInternalCalibration public const int colorImageWidth = 1920; public const int colorImageHeight = 1080; - public Matrix colorCameraMatrix = Matrix.Build.Dense(3, 3); - public Vector colorLensDistortion = Vector.Build.Dense(5); - public Matrix depthCameraMatrix = Matrix.Build.Dense(3, 3); - public Vector depthLensDistortion = Vector.Build.Dense(5); - public Matrix depthToColorTransform = Matrix.Build.Dense(4, 4); + public Matrix colorCameraMatrix; + public Vector colorLensDistortion; + public Matrix depthCameraMatrix; + public Vector depthLensDistortion; + public Matrix depthToColorTransform; [XmlIgnoreAttribute] public bool silent = true; @@ -32,11 +33,15 @@ internal class KinectInternalCalibration internal void RecoverCalibrationFromSensor(Microsoft.Kinect.KinectSensor kinectSensor) { var stopWatch = new System.Diagnostics.Stopwatch(); - stopWatch.Start(); - var objectPoints1 = new List>(); - var colorPoints1 = new List(); - var depthPoints1 = new List(); + if (!this.silent) + { + stopWatch.Start(); + } + + var objectPoints1 = new List(); + var colorPoints1 = new List(); + var depthPoints1 = new List(); int n = 0; for (float x = -2f; x < 2f; x += 0.2f) @@ -61,36 +66,34 @@ internal void RecoverCalibrationFromSensor(Microsoft.Kinect.KinectSensor kinectS (kinectDepthPoint.Y >= 0) && (kinectDepthPoint.Y < depthImageHeight)) { n++; - var objectPoint = Vector.Build.Dense(3); - objectPoint[0] = kinectCameraPoint.X; - objectPoint[1] = kinectCameraPoint.Y; - objectPoint[2] = kinectCameraPoint.Z; - objectPoints1.Add(objectPoint); + objectPoints1.Add(new Point3D(kinectCameraPoint.X, kinectCameraPoint.Y, kinectCameraPoint.Z)); - var colorPoint = new System.Drawing.PointF(); - colorPoint.X = kinectColorPoint.X; - colorPoint.Y = kinectColorPoint.Y; + var colorPoint = new Point2D(kinectColorPoint.X, kinectColorPoint.Y); colorPoints1.Add(colorPoint); - - //Console.WriteLine(objectPoint[0] + "\t" + objectPoint[1] + "\t" + colorPoint.X + "\t" + colorPoint.Y); - - var depthPoint = new System.Drawing.PointF(); - depthPoint.X = kinectDepthPoint.X; - depthPoint.Y = kinectDepthPoint.Y; + var depthPoint = new Point2D(kinectDepthPoint.X, kinectDepthPoint.Y); depthPoints1.Add(depthPoint); } } - this.colorCameraMatrix[0, 0] = 1000; //fx - this.colorCameraMatrix[1, 1] = 1000; //fy - this.colorCameraMatrix[0, 2] = colorImageWidth / 2; //cx - this.colorCameraMatrix[1, 2] = colorImageHeight / 2; //cy - this.colorCameraMatrix[2, 2] = 1; - - var rotation = Vector.Build.Dense(3); - var translation = Vector.Build.Dense(3); - var colorError = CalibrateColorCamera(objectPoints1, colorPoints1, colorCameraMatrix, colorLensDistortion, rotation, translation, this.silent); + var initialColorCameraMatrix = Matrix.Build.Dense(3, 3); + var initialColorDistortion = Vector.Build.Dense(2); + 
initialColorCameraMatrix[0, 0] = 1000; //fx + initialColorCameraMatrix[1, 1] = 1000; //fy + initialColorCameraMatrix[0, 2] = colorImageWidth / 2; //cx + initialColorCameraMatrix[1, 2] = colorImageHeight / 2; //cy + initialColorCameraMatrix[2, 2] = 1; + + var colorError = CalibrateCameraIntrinsicsAndExtrinsics( + objectPoints1, + colorPoints1, + initialColorCameraMatrix, + initialColorDistortion, + out this.colorCameraMatrix, + out this.colorLensDistortion, + out var rotation, + out var translation, + this.silent); var rotationMatrix = AxisAngleToMatrix(rotation); this.depthToColorTransform = Matrix.Build.DenseIdentity(4, 4); @@ -101,14 +104,22 @@ internal void RecoverCalibrationFromSensor(Microsoft.Kinect.KinectSensor kinectS this.depthToColorTransform[i, j] = rotationMatrix[i, j]; } + var initialDepthCameraMatrix = Matrix.Build.Dense(3, 3); + var initialDepthDistortion = Vector.Build.Dense(2); + initialDepthCameraMatrix[0, 0] = 360; //fx + initialDepthCameraMatrix[1, 1] = 360; //fy + initialDepthCameraMatrix[0, 2] = depthImageWidth / 2.0; //cx + initialDepthCameraMatrix[1, 2] = depthImageHeight / 2.0; //cy + initialDepthCameraMatrix[2, 2] = 1; - this.depthCameraMatrix[0, 0] = 360; //fx - this.depthCameraMatrix[1, 1] = 360; //fy - this.depthCameraMatrix[0, 2] = depthImageWidth / 2.0; //cx - this.depthCameraMatrix[1, 2] = depthImageHeight / 2.0; //cy - this.depthCameraMatrix[2, 2] = 1; - - var depthError = CalibrateDepthCamera(objectPoints1, depthPoints1, depthCameraMatrix, depthLensDistortion, silent); + var depthError = CalibrateCameraIntrinsics( + objectPoints1, + depthPoints1, + initialDepthCameraMatrix, + initialDepthDistortion, + out this.depthCameraMatrix, + out this.depthLensDistortion, + this.silent); // check projections double depthProjectionError = 0; @@ -122,36 +133,33 @@ internal void RecoverCalibrationFromSensor(Microsoft.Kinect.KinectSensor kinectS // "camera space" == depth camera space // depth camera projection - double depthU, depthV; - Project(depthCameraMatrix, depthLensDistortion, testObjectPoint[0], testObjectPoint[1], testObjectPoint[2], out depthU, out depthV); + Project(depthCameraMatrix, depthLensDistortion, testObjectPoint, out Point2D projectedDepthPoint); - double dx = testDepthPoint.X - depthU; - double dy = testDepthPoint.Y - depthV; + double dx = testDepthPoint.X - projectedDepthPoint.X; + double dy = testDepthPoint.Y - projectedDepthPoint.Y; depthProjectionError += (dx * dx) + (dy * dy); // color camera projection - testObjectPoint4[0] = testObjectPoint[0]; - testObjectPoint4[1] = testObjectPoint[1]; - testObjectPoint4[2] = testObjectPoint[2]; + testObjectPoint4[0] = testObjectPoint.X; + testObjectPoint4[1] = testObjectPoint.Y; + testObjectPoint4[2] = testObjectPoint.Z; testObjectPoint4[3] = 1; var color = depthToColorTransform * testObjectPoint4; - color *= (1.0 / color[3]); // not necessary for this transform + color *= 1.0 / color[3]; // not necessary for this transform - double colorU, colorV; - Project(colorCameraMatrix, colorLensDistortion, color[0], color[1], color[2], out colorU, out colorV); + Project(colorCameraMatrix, colorLensDistortion, new Point3D(color[0], color[1], color[2]), out Point2D projectedColorPoint); - dx = testColorPoint.X - colorU; - dy = testColorPoint.Y - colorV; + dx = testColorPoint.X - projectedColorPoint.X; + dy = testColorPoint.Y - projectedColorPoint.Y; colorProjectionError += (dx * dx) + (dy * dy); } depthProjectionError /= n; colorProjectionError /= n; - - stopWatch.Stop(); if (!this.silent) { + stopWatch.Stop(); 
Console.WriteLine("FakeCalibration :"); Console.WriteLine("n = " + n); Console.WriteLine("color error = " + colorError); @@ -167,383 +175,5 @@ internal void RecoverCalibrationFromSensor(Microsoft.Kinect.KinectSensor kinectS Console.WriteLine("________________________________________________________"); } } - - private static void Project(Matrix cameraMatrix, Vector distCoeffs, double x, double y, double z, out double u, out double v) - { - double xp = x / z; - double yp = y / z; - - double fx = cameraMatrix[0, 0]; - double fy = cameraMatrix[1, 1]; - double cx = cameraMatrix[0, 2]; - double cy = cameraMatrix[1, 2]; - double k1 = distCoeffs[0]; - double k2 = distCoeffs[1]; - - // compute f(xp, yp) - double rSquared = xp * xp + yp * yp; - double xpp = xp * (1 + k1 * rSquared + k2 * rSquared * rSquared); - double ypp = yp * (1 + k1 * rSquared + k2 * rSquared * rSquared); - u = fx * xpp + cx; - v = fy * ypp + cy; - } - - private static void Undistort(Matrix cameraMatrix, Vector distCoeffs, double xin, double yin, out double xout, out double yout) - { - float fx = (float)cameraMatrix[0, 0]; - float fy = (float)cameraMatrix[1, 1]; - float cx = (float)cameraMatrix[0, 2]; - float cy = (float)cameraMatrix[1, 2]; - float[] kappa = new float[] { (float)distCoeffs[0], (float)distCoeffs[1] }; - Undistort(fx, fy, cx, cy, kappa, xin, yin, out xout, out yout); - } - - private static void Undistort(float fx, float fy, float cx, float cy, float[] kappa, double xin, double yin, out double xout, out double yout) - { - // maps coords in undistorted image (xin, yin) to coords in distorted image (xout, yout) - double x = (xin - cx) / fx; - double y = (yin - cy) / fy; // chances are you will need to flip y before passing in: imageHeight - yin - - // Newton Raphson - double ru = Math.Sqrt(x * x + y * y); - double rdest = ru; - double factor = 1.0; - - bool converged = false; - for (int j = 0; (j < 100) && !converged; j++) - { - double rdest2 = rdest * rdest; - double num = 1.0, denom = 1.0; - double rk = 1.0; - - factor = 1.0; - for (int k = 0; k < 2; k++) - { - rk *= rdest2; - factor += kappa[k] * rk; - denom += (2.0 * k + 3.0) * kappa[k] * rk; - } - num = rdest * factor - ru; - rdest -= (num / denom); - - converged = (num / denom) < 0.0001; - } - xout = x / factor; - yout = y / factor; - } - - private static double CalibrateDepthCamera(List> worldPoints, List imagePoints, Matrix cameraMatrix, Vector distCoeffs, bool silent = true) - { - int nPoints = worldPoints.Count; - - // pack parameters into vector - // parameters: fx, fy, cx, cy, k1, k2 = 6 parameters - int nParameters = 6; - var parameters = Vector.Build.Dense(nParameters); - - { - int pi = 0; - parameters[pi++] = cameraMatrix[0, 0]; // fx - parameters[pi++] = cameraMatrix[1, 1]; // fy - parameters[pi++] = cameraMatrix[0, 2]; // cx - parameters[pi++] = cameraMatrix[1, 2]; // cy - parameters[pi++] = distCoeffs[0]; // k1 - parameters[pi++] = distCoeffs[1]; // k2 - } - - // size of our error vector - int nValues = nPoints * 2; // each component (x,y) is a separate entry - - LevenbergMarquardt.Function function = delegate (Vector p) - { - var fvec = Vector.Build.Dense(nValues); - - // unpack parameters - int pi = 0; - double fx = p[pi++]; - double fy = p[pi++]; - double cx = p[pi++]; - double cy = p[pi++]; - double k1 = p[pi++]; - double k2 = p[pi++]; - - var K = Matrix.Build.DenseIdentity(3, 3); - K[0, 0] = fx; - K[1, 1] = fy; - K[0, 2] = cx; - K[1, 2] = cy; - - var d = Vector.Build.Dense(5, 0); - d[0] = k1; - d[1] = k2; - - int fveci = 0; - for (int i = 0; i 
< worldPoints.Count; i++) - { - double u, v; - var x = worldPoints[i]; - KinectInternalCalibration.Project(K, d, x[0], x[1], x[2], out u, out v); - - var imagePoint = imagePoints[i]; - fvec[fveci++] = imagePoint.X - u; - fvec[fveci++] = imagePoint.Y - v; - } - return fvec; - }; - - // optimize - var calibrate = new LevenbergMarquardt(function); - while (calibrate.State == LevenbergMarquardt.States.Running) - { - var rmsError = calibrate.MinimizeOneStep(parameters); - if (!silent) Console.WriteLine("rms error = " + rmsError); - } - if (!silent) - { - for (int i = 0; i < nParameters; i++) - Console.WriteLine(parameters[i] + "\t"); - Console.WriteLine(); - } - - // unpack parameters - { - int pi = 0; - double fx = parameters[pi++]; - double fy = parameters[pi++]; - double cx = parameters[pi++]; - double cy = parameters[pi++]; - double k1 = parameters[pi++]; - double k2 = parameters[pi++]; - cameraMatrix[0, 0] = fx; - cameraMatrix[1, 1] = fy; - cameraMatrix[0, 2] = cx; - cameraMatrix[1, 2] = cy; - distCoeffs[0] = k1; - distCoeffs[1] = k2; - } - - return calibrate.RMSError; - } - - private static double CalibrateColorCamera(List> worldPoints, List imagePoints, Matrix cameraMatrix, Vector distCoeffs, Vector rotation, Vector translation, bool silent = true) - { - int nPoints = worldPoints.Count; - - { - Matrix R; - Vector t; - DLT(cameraMatrix, distCoeffs, worldPoints, imagePoints, out R, out t); - var r = MatrixToAxisAngle(R); - r.CopyTo(rotation); - t.CopyTo(translation); - } - - // pack parameters into vector - // parameters: fx, fy, cx, cy, k1, k2, + 3 for rotation, 3 translation = 12 - int nParameters = 12; - var parameters = Vector.Build.Dense(nParameters); - { - int pi = 0; - parameters[pi++] = cameraMatrix[0, 0]; // fx - parameters[pi++] = cameraMatrix[1, 1]; // fy - parameters[pi++] = cameraMatrix[0, 2]; // cx - parameters[pi++] = cameraMatrix[1, 2]; // cy - parameters[pi++] = distCoeffs[0]; // k1 - parameters[pi++] = distCoeffs[1]; // k2 - parameters[pi++] = rotation[0]; - parameters[pi++] = rotation[1]; - parameters[pi++] = rotation[2]; - parameters[pi++] = translation[0]; - parameters[pi++] = translation[1]; - parameters[pi++] = translation[2]; - - } - - // size of our error vector - int nValues = nPoints * 2; // each component (x,y) is a separate entry - - LevenbergMarquardt.Function function = delegate (Vector p) - { - var fvec = Vector.Build.Dense(nValues); - - // unpack parameters - int pi = 0; - double fx = p[pi++]; - double fy = p[pi++]; - double cx = p[pi++]; - double cy = p[pi++]; - - double k1 = p[pi++]; - double k2 = p[pi++]; - - var K = Matrix.Build.DenseIdentity(3, 3); - K[0, 0] = fx; - K[1, 1] = fy; - K[0, 2] = cx; - K[1, 2] = cy; - - var d = Vector.Build.Dense(5, 0); - d[0] = k1; - d[1] = k2; - - var r = Vector.Build.Dense(3); - r[0] = p[pi++]; - r[1] = p[pi++]; - r[2] = p[pi++]; - - var t = Vector.Build.Dense(3); - t[0] = p[pi++]; - t[1] = p[pi++]; - t[2] = p[pi++]; - - var R = AxisAngleToMatrix(r); - - int fveci = 0; - for (int i = 0; i < worldPoints.Count; i++) - { - // transform world point to local camera coordinates - var x = R * worldPoints[i]; - x += t; - - // fvec_i = y_i - f(x_i) - double u, v; - KinectInternalCalibration.Project(K, d, x[0], x[1], x[2], out u, out v); - - var imagePoint = imagePoints[i]; - fvec[fveci++] = imagePoint.X - u; - fvec[fveci++] = imagePoint.Y - v; - } - return fvec; - }; - - // optimize - var calibrate = new LevenbergMarquardt(function); - while (calibrate.State == LevenbergMarquardt.States.Running) - { - var rmsError = 
calibrate.MinimizeOneStep(parameters); - if (!silent) Console.WriteLine("rms error = " + rmsError); - } - if (!silent) - { - for (int i = 0; i < nParameters; i++) - Console.WriteLine(parameters[i] + "\t"); - Console.WriteLine(); - } - // unpack parameters - { - int pi = 0; - double fx = parameters[pi++]; - double fy = parameters[pi++]; - double cx = parameters[pi++]; - double cy = parameters[pi++]; - double k1 = parameters[pi++]; - double k2 = parameters[pi++]; - cameraMatrix[0, 0] = fx; - cameraMatrix[1, 1] = fy; - cameraMatrix[0, 2] = cx; - cameraMatrix[1, 2] = cy; - distCoeffs[0] = k1; - distCoeffs[1] = k2; - rotation[0] = parameters[pi++]; - rotation[1] = parameters[pi++]; - rotation[2] = parameters[pi++]; - translation[0] = parameters[pi++]; - translation[1] = parameters[pi++]; - translation[2] = parameters[pi++]; - } - - - return calibrate.RMSError; - } - - // Use DLT to obtain estimate of calibration rig pose; in our case this is the pose of the Kinect camera. - // This pose estimate will provide a good initial estimate for subsequent projector calibration. - // Note for a full PnP solution we should probably refine with Levenberg-Marquardt. - // DLT is described in Hartley and Zisserman p. 178 - private static void DLT(Matrix cameraMatrix, VectordistCoeffs, List> worldPoints, List imagePoints, out Matrix R, out Vectort) - { - int n = worldPoints.Count; - - var A = Matrix.Build.Dense(2 * n, 12); - - for (int j = 0; j < n; j++) - { - var X = worldPoints[j]; - var imagePoint = imagePoints[j]; - - double x, y; - Undistort(cameraMatrix, distCoeffs, imagePoint.X, imagePoint.Y, out x, out y); - - int ii = 2 * j; - A[ii, 4] = -X[0]; - A[ii, 5] = -X[1]; - A[ii, 6] = -X[2]; - A[ii, 7] = -1; - - A[ii, 8] = y * X[0]; - A[ii, 9] = y * X[1]; - A[ii, 10] = y * X[2]; - A[ii, 11] = y; - - ii++; // next row - A[ii, 0] = X[0]; - A[ii, 1] = X[1]; - A[ii, 2] = X[2]; - A[ii, 3] = 1; - - A[ii, 8] = -x * X[0]; - A[ii, 9] = -x * X[1]; - A[ii, 10] = -x * X[2]; - A[ii, 11] = -x; - } - - // Pcolumn is the eigenvector of ATA with the smallest eignvalue - var Pcolumn = Vector.Build.Dense(12); - { - var ATA = A.TransposeThisAndMultiply(A); - ATA.Evd().EigenVectors.Column(0).CopyTo(Pcolumn); - } - - // reshape into 3x4 projection matrix - var P = Matrix.Build.Dense(3, 4); - { - for (int i = 0; i < 3; i++) - { - for (int j = 0; j < 4; j++) - { - P[i, j] = Pcolumn[i*4 + j]; - } - } - } - - R = Matrix.Build.Dense(3, 3); - for (int i = 0; i < 3; i++) - for (int j = 0; j < 3; j++) - R[i, j] = P[i, j]; - - if (R.Determinant() < 0) - { - R *= -1; - P *= -1; - } - - // orthogonalize R - { - var svd = R.Svd(); - R = svd.U * svd.VT; - } - - // determine scale factor - var RP = Matrix.Build.Dense(3, 3); - for (int i = 0; i < 3; i++) - for (int j = 0; j < 3; j++) - RP[i, j] = P[i, j]; - double s = RP.L2Norm() / R.L2Norm(); - - t = Vector.Build.Dense(3); - for (int i = 0; i < 3; i++) - t[i] = P[i, 3]; - t *= (1.0 / s); - } } } diff --git a/Sources/Kinect/Microsoft.Psi.Kinect.Windows/KinectSensor.cs b/Sources/Kinect/Microsoft.Psi.Kinect.Windows/KinectSensor.cs index f4b2b9667..3466342bf 100644 --- a/Sources/Kinect/Microsoft.Psi.Kinect.Windows/KinectSensor.cs +++ b/Sources/Kinect/Microsoft.Psi.Kinect.Windows/KinectSensor.cs @@ -40,7 +40,7 @@ public class KinectSensor : ISourceComponent, IDisposable /// /// Initializes a new instance of the class. /// - /// Pipeline this sensor is a part of. + /// The pipeline to add the component to. /// Name of configuration file. 
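The calibration refactor above deletes the local Project, Undistort, CalibrateDepthCamera, CalibrateColorCamera, and DLT helpers in favor of the shared CalibrationExtensions routines. For reference, the removed local Project helper implemented a pinhole model with two radial distortion terms; the sketch below restates that model (it is assumed, not shown in this patch, that the shared Project overload follows the same model).

    // Reference sketch of the projection model from the removed local Project helper:
    // pinhole projection with radial distortion coefficients k1 and k2.
    using MathNet.Numerics.LinearAlgebra;

    public static class ProjectionSketch
    {
        public static (double U, double V) Project(Matrix<double> cameraMatrix, Vector<double> distCoeffs, double x, double y, double z)
        {
            // normalize by depth
            double xp = x / z;
            double yp = y / z;

            double fx = cameraMatrix[0, 0], fy = cameraMatrix[1, 1];
            double cx = cameraMatrix[0, 2], cy = cameraMatrix[1, 2];
            double k1 = distCoeffs[0], k2 = distCoeffs[1];

            // radial distortion: scale by (1 + k1*r^2 + k2*r^4)
            double r2 = (xp * xp) + (yp * yp);
            double radial = 1 + (k1 * r2) + (k2 * r2 * r2);

            // back to pixel coordinates
            return ((fx * xp * radial) + cx, (fy * yp * radial) + cy);
        }
    }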
public KinectSensor(Pipeline pipeline, string configurationFilename) : this(pipeline) @@ -52,7 +52,7 @@ public KinectSensor(Pipeline pipeline, string configurationFilename) /// /// Initializes a new instance of the class. /// - /// Pipeline this sensor is a part of. + /// The pipeline to add the component to. /// Configuration to use. public KinectSensor(Pipeline pipeline, KinectSensorConfiguration configuration) : this(pipeline) @@ -63,7 +63,7 @@ public KinectSensor(Pipeline pipeline, KinectSensorConfiguration configuration) /// /// Initializes a new instance of the class. /// - /// Pipeline this sensor is a part of. + /// The pipeline to add the component to. private KinectSensor(Pipeline pipeline) { this.pipeline = pipeline; @@ -473,6 +473,7 @@ private void MapColorToDepth(DepthFrame depthFrame, Shared colorImage) byte* dstRow = (byte*)rgbd.Resource.ImageData.ToPointer(); int depthWidth = depthFrame.FrameDescription.Width; int depthHeight = depthFrame.FrameDescription.Height; + var bytesPerPixel = colorImage.Resource.BitsPerPixel / 8; for (int y = 0; y < colorImage.Resource.Height; y++) { byte* srcCol = srcRow; @@ -494,7 +495,7 @@ private void MapColorToDepth(DepthFrame depthFrame, Shared colorImage) } dstCol += 4; - srcCol += colorImage.Resource.BitsPerPixel / 8; + srcCol += bytesPerPixel; offset++; } diff --git a/Sources/Kinect/Test.Psi.Kinect.Windows.x64/Mesh.cs b/Sources/Kinect/Test.Psi.Kinect.Windows.x64/Mesh.cs index c1bb79136..e8632956b 100644 --- a/Sources/Kinect/Test.Psi.Kinect.Windows.x64/Mesh.cs +++ b/Sources/Kinect/Test.Psi.Kinect.Windows.x64/Mesh.cs @@ -1,32 +1,32 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. - -namespace Test.Psi.Kinect -{ - using MathNet.Numerics.LinearAlgebra; - using MathNet.Spatial.Euclidean; +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Test.Psi.Kinect +{ + using MathNet.Numerics.LinearAlgebra; + using MathNet.Spatial.Euclidean; using Microsoft.Psi; using Microsoft.Psi.Calibration; - using Microsoft.Psi.Imaging; - using Microsoft.Psi.Kinect; - + using Microsoft.Psi.Imaging; + using Microsoft.Psi.Kinect; + /// /// Create Mesh. - /// - public class Mesh - { - private HalfEdge[] edges; - private Face[] faces; - private Vertex[] vertices; - + /// + public class Mesh + { + private HalfEdge[] edges; + private Face[] faces; + private Vertex[] vertices; + /// /// Gets number of mesh vertices. - /// - public int NumberVertices => this.Vertices.Length; - + /// + public int NumberVertices => this.Vertices.Length; + /// /// Gets number of mesh faces. - /// + /// public int NumberFaces => this.Faces.Length; /// @@ -51,249 +51,245 @@ public class Mesh /// Color data image. /// Kinect calibration. /// Mesh. 
- public static Mesh MeshFromDepthMap(Shared depthImage, Shared colorData, IDepthDeviceCalibrationInfo calib) - { - Mesh mesh = new Mesh(); - int width = depthImage.Resource.Width; - int height = depthImage.Resource.Height; - mesh.Vertices = new Vertex[width * height]; - bool[] vertexValid = new bool[width * height]; - mesh.Faces = new Face[2 * (width - 1) * (height - 1)]; - byte[] depthData = depthImage.Resource.ReadBytes(depthImage.Resource.Size); - byte[] pixelData = colorData.Resource.ReadBytes(colorData.Resource.Size); - int count = 0; - unsafe - { - for (int i = 0; i < height; i++) - { - for (int j = 0; j < width; j++) - { - ushort* src = (ushort*)((byte*)depthImage.Resource.ImageData.ToPointer() + (i * depthImage.Resource.Stride)) + j; - ushort depth = *src; - Point2D pt = new Point2D(j, i); - vertexValid[count] = (depth == 0) ? false : true; - mesh.Vertices[count].Pos = new Point3D(0.0, 0.0, 0.0); - mesh.Vertices[count].Color = new Point3D(0.0, 0.0, 0.0); - if (depth != 0) - { - Point2D pixelCoord; - - // Determine vertex position+color via new calibration - Point2D newpt = new Point2D(pt.X, calib.DepthIntrinsics.ImageHeight - pt.Y); - Point3D p = calib.DepthIntrinsics.ToCameraSpace(newpt, depth, true); - mesh.Vertices[count].Pos = new Point3D(p.X / 1000.0, p.Y / 1000.0, p.Z / 1000.0); - - Vector pos = Vector.Build.Dense(4); - pos[0] = mesh.Vertices[count].Pos.X; - pos[1] = mesh.Vertices[count].Pos.Y; - pos[2] = mesh.Vertices[count].Pos.Z; - pos[3] = 1.0; - - pos = calib.ColorExtrinsics * pos; - Point3D clrPt = new Point3D(pos[0], pos[1], pos[2]); - pixelCoord = calib.ColorIntrinsics.ToPixelSpace(clrPt, true); - if (pixelCoord.X >= 0 && pixelCoord.X < colorData.Resource.Width && - pixelCoord.Y >= 0 && pixelCoord.Y < colorData.Resource.Height) - { - byte* pixel = ((byte*)colorData.Resource.ImageData.ToPointer() + ((int)pixelCoord.Y * colorData.Resource.Stride)) + (4 * (int)pixelCoord.X); - mesh.Vertices[count].Color = new Point3D((double)(int)*(pixel + 2), (double)(int)*(pixel + 1), (double)(int)*pixel); - } - } - - count++; - } - } - } - - count = 0; - - // Create our edge list - // - // There are 6 edges per quad along with the edges - // around the outside of the entire image (2*(width+height-2) edges) - // <------ <------ - // X ------> X ------> X - // |^ 0 //|| 6 //|| - // || // || // || - // ||2 1// ||8 7// || - // || // || // || - // || //3 4|| //9 10|| - // || // || // || - // v| // 5 v| // 11 v| - // X <------ X <------ X - // ------> ------> - // |^ 12 //|| 18 //|| - // || // || // || - // ||14 13// ||20 19// || - // || // 16|| // 22|| - // || //15 || //21 || - // || // || // || - // v| // 17 v| // 23 v| - // X <------ X <------ X - // ------> ------> - int edgeOffset = (width - 1) * (height - 1) * 6; - int numEdges = edgeOffset + (2 * (width - 1)) + (2 * (height - 1)); - mesh.Edges = new HalfEdge[numEdges]; - for (int i = 0; i < numEdges; i++) - { - mesh.Edges[i] = default(HalfEdge); - } - - int faceIndex = 0; - int edgeIndex = 0; - - // Create our edge list - for (int j = 0; j < height - 1; j++) - { - for (int i = 0; i < width - 1; i++) - { - mesh.Faces[faceIndex] = default(Face); - - mesh.Faces[faceIndex].Valid = - vertexValid[(j * width) + i + 1] && - vertexValid[((j + 1) * width) + i] && - vertexValid[(j * width) + i]; - - mesh.Edges[edgeIndex].Ccw = edgeIndex + 2; - mesh.Edges[edgeIndex].Cw = edgeIndex + 1; - mesh.Edges[edgeIndex].Face = faceIndex; - mesh.Edges[edgeIndex].Head = (j * width) + i + 1; - if (j == 0) - { - mesh.Edges[edgeIndex].Opp = edgeOffset + i; - } 
- else - { - mesh.Edges[edgeIndex].Opp = edgeIndex - (width * 6) + 5; - } - - mesh.Edges[edgeIndex + 1].Ccw = edgeIndex; - mesh.Edges[edgeIndex + 1].Cw = edgeIndex + 2; - mesh.Edges[edgeIndex + 1].Face = faceIndex; - mesh.Edges[edgeIndex + 1].Head = ((j + 1) * width) + i; - mesh.Edges[edgeIndex + 1].Opp = edgeIndex + 3; - - mesh.Edges[edgeIndex + 2].Ccw = edgeIndex + 1; - mesh.Edges[edgeIndex + 2].Cw = edgeIndex; - mesh.Edges[edgeIndex + 2].Face = faceIndex; - mesh.Edges[edgeIndex + 2].Head = (j * width) + i; - if (i == 0) - { - mesh.Edges[edgeIndex].Opp = edgeOffset + (width - 1) + j; - } - else - { - mesh.Edges[edgeIndex].Opp = edgeIndex - 4; - } - - mesh.Faces[faceIndex].Edge = edgeIndex; - edgeIndex += 3; - faceIndex++; - - mesh.Faces[faceIndex] = default(Face); - - mesh.Faces[faceIndex].Valid = - vertexValid[(j * width) + i + 1] && - vertexValid[((j + 1) * width) + i + 1] && - vertexValid[((j + 1) * width) + i]; - - mesh.Edges[edgeIndex].Ccw = edgeIndex + 2; - mesh.Edges[edgeIndex].Cw = edgeIndex + 1; - mesh.Edges[edgeIndex].Face = faceIndex; - mesh.Edges[edgeIndex].Head = (j * width) + i + 1; - mesh.Edges[edgeIndex].Opp = edgeIndex - 2; - - mesh.Edges[edgeIndex + 1].Ccw = edgeIndex; - mesh.Edges[edgeIndex + 1].Cw = edgeIndex + 2; - mesh.Edges[edgeIndex + 1].Face = faceIndex; - mesh.Edges[edgeIndex + 1].Head = ((j + 1) * width) + i + 1; - if (i == width - 1) - { - mesh.Edges[edgeIndex].Opp = edgeOffset + (width - 1) + (height - 1) + j; - } - else - { - mesh.Edges[edgeIndex].Opp = edgeIndex + 4; - } - - mesh.Edges[edgeIndex + 2].Ccw = edgeIndex + 1; - mesh.Edges[edgeIndex + 2].Cw = edgeIndex; - mesh.Edges[edgeIndex + 2].Face = faceIndex; - mesh.Edges[edgeIndex + 2].Head = ((j + 1) * width) + i; - if (j == height - 1) - { - mesh.Edges[edgeIndex].Opp = edgeOffset + (width - 1) + (2 * (height - 1)) + i; - } - else - { - mesh.Edges[edgeIndex].Opp = edgeIndex + ((width - 1) * 6) - 5; - } - - mesh.Faces[faceIndex].Edge = edgeIndex; - edgeIndex += 3; - faceIndex++; - } - } - - // Link up outer edges... 
first top edges - int prevEdge = edgeOffset + width; - int edge = edgeOffset; - for (int i = 0; i < width - 1; i++) - { - mesh.Edges[edge].Cw = prevEdge; - mesh.Edges[edge].Ccw = edge + 1; - mesh.Edges[edge].Opp = i * 6; - mesh.Edges[edge].Face = -1; - mesh.Edges[edge].Head = i; - prevEdge = edge; - edge++; - } - - // next the left edges - prevEdge = edgeOffset; - for (int i = 0; i < height - 1; i++) - { - mesh.Edges[edge].Cw = edge + 1; - mesh.Edges[edge].Ccw = prevEdge; - mesh.Edges[edge].Opp = (i * (width - 1) * 6) + 2; - mesh.Edges[edge].Face = -1; - mesh.Edges[edge].Head = width * (i + 1); - prevEdge = edge; - } - - // next the right edges - prevEdge = edgeOffset + (width - 1); - for (int i = 0; i < height - 1; i++) - { - mesh.Edges[edge].Ccw = edge + 1; - mesh.Edges[edge].Cw = prevEdge; - mesh.Edges[edge].Opp = (i * (width - 1) * 6) - 2; - mesh.Edges[edge].Face = -1; - mesh.Edges[edge].Head = (i * width) - 1; - prevEdge = edge; - } - - // finally the bottom edges - prevEdge = edgeOffset + (width - 1) + (height - 1); - for (int i = 0; i < width - 1; i++) - { - mesh.Edges[edge].Cw = edge + 1; - mesh.Edges[edge].Ccw = prevEdge; - mesh.Edges[edge].Opp = ((height - 2) * (width - 1) * 6) + (i * 6) + 5; - mesh.Edges[edge].Face = -1; - mesh.Edges[edge].Head = ((height - 1) * width) + i; - prevEdge = edge; - } - - return mesh; - } - + public static Mesh MeshFromDepthMap(Shared depthImage, Shared colorData, IDepthDeviceCalibrationInfo calib) + { + Mesh mesh = new Mesh(); + int width = depthImage.Resource.Width; + int height = depthImage.Resource.Height; + mesh.Vertices = new Vertex[width * height]; + bool[] vertexValid = new bool[width * height]; + mesh.Faces = new Face[2 * (width - 1) * (height - 1)]; + byte[] depthData = depthImage.Resource.ReadBytes(depthImage.Resource.Size); + byte[] pixelData = colorData.Resource.ReadBytes(colorData.Resource.Size); + int count = 0; + unsafe + { + for (int i = 0; i < height; i++) + { + for (int j = 0; j < width; j++) + { + ushort* src = (ushort*)((byte*)depthImage.Resource.ImageData.ToPointer() + (i * depthImage.Resource.Stride)) + j; + ushort depth = *src; + Point2D pt = new Point2D(j, i); + vertexValid[count] = (depth == 0) ? 
false : true; + mesh.Vertices[count].Pos = new Point3D(0.0, 0.0, 0.0); + mesh.Vertices[count].Color = new Point3D(0.0, 0.0, 0.0); + if (depth != 0) + { + // Determine vertex position+color via new calibration + Point2D newpt = new Point2D(pt.X, calib.DepthIntrinsics.ImageHeight - pt.Y); + Point3D p = calib.DepthIntrinsics.GetCameraSpacePosition(newpt, depth, true); + mesh.Vertices[count].Pos = new Point3D(p.X / 1000.0, p.Y / 1000.0, p.Z / 1000.0); + + Vector pos = Vector.Build.Dense(4); + pos[0] = mesh.Vertices[count].Pos.X; + pos[1] = mesh.Vertices[count].Pos.Y; + pos[2] = mesh.Vertices[count].Pos.Z; + pos[3] = 1.0; + + pos = calib.ColorExtrinsics * pos; + Point3D clrPt = new Point3D(pos[0], pos[1], pos[2]); + if (calib.ColorIntrinsics.TryGetPixelPosition(clrPt, true, out var pixelCoord)) + { + byte* pixel = ((byte*)colorData.Resource.ImageData.ToPointer() + ((int)pixelCoord.Y * colorData.Resource.Stride)) + (4 * (int)pixelCoord.X); + mesh.Vertices[count].Color = new Point3D((double)(int)*(pixel + 2), (double)(int)*(pixel + 1), (double)(int)*pixel); + } + } + + count++; + } + } + } + + count = 0; + + // Create our edge list + // + // There are 6 edges per quad along with the edges + // around the outside of the entire image (2*(width+height-2) edges) + // <------ <------ + // X ------> X ------> X + // |^ 0 //|| 6 //|| + // || // || // || + // ||2 1// ||8 7// || + // || // || // || + // || //3 4|| //9 10|| + // || // || // || + // v| // 5 v| // 11 v| + // X <------ X <------ X + // ------> ------> + // |^ 12 //|| 18 //|| + // || // || // || + // ||14 13// ||20 19// || + // || // 16|| // 22|| + // || //15 || //21 || + // || // || // || + // v| // 17 v| // 23 v| + // X <------ X <------ X + // ------> ------> + int edgeOffset = (width - 1) * (height - 1) * 6; + int numEdges = edgeOffset + (2 * (width - 1)) + (2 * (height - 1)); + mesh.Edges = new HalfEdge[numEdges]; + for (int i = 0; i < numEdges; i++) + { + mesh.Edges[i] = default(HalfEdge); + } + + int faceIndex = 0; + int edgeIndex = 0; + + // Create our edge list + for (int j = 0; j < height - 1; j++) + { + for (int i = 0; i < width - 1; i++) + { + mesh.Faces[faceIndex] = default(Face); + + mesh.Faces[faceIndex].Valid = + vertexValid[(j * width) + i + 1] && + vertexValid[((j + 1) * width) + i] && + vertexValid[(j * width) + i]; + + mesh.Edges[edgeIndex].Ccw = edgeIndex + 2; + mesh.Edges[edgeIndex].Cw = edgeIndex + 1; + mesh.Edges[edgeIndex].Face = faceIndex; + mesh.Edges[edgeIndex].Head = (j * width) + i + 1; + if (j == 0) + { + mesh.Edges[edgeIndex].Opp = edgeOffset + i; + } + else + { + mesh.Edges[edgeIndex].Opp = edgeIndex - (width * 6) + 5; + } + + mesh.Edges[edgeIndex + 1].Ccw = edgeIndex; + mesh.Edges[edgeIndex + 1].Cw = edgeIndex + 2; + mesh.Edges[edgeIndex + 1].Face = faceIndex; + mesh.Edges[edgeIndex + 1].Head = ((j + 1) * width) + i; + mesh.Edges[edgeIndex + 1].Opp = edgeIndex + 3; + + mesh.Edges[edgeIndex + 2].Ccw = edgeIndex + 1; + mesh.Edges[edgeIndex + 2].Cw = edgeIndex; + mesh.Edges[edgeIndex + 2].Face = faceIndex; + mesh.Edges[edgeIndex + 2].Head = (j * width) + i; + if (i == 0) + { + mesh.Edges[edgeIndex].Opp = edgeOffset + (width - 1) + j; + } + else + { + mesh.Edges[edgeIndex].Opp = edgeIndex - 4; + } + + mesh.Faces[faceIndex].Edge = edgeIndex; + edgeIndex += 3; + faceIndex++; + + mesh.Faces[faceIndex] = default(Face); + + mesh.Faces[faceIndex].Valid = + vertexValid[(j * width) + i + 1] && + vertexValid[((j + 1) * width) + i + 1] && + vertexValid[((j + 1) * width) + i]; + + mesh.Edges[edgeIndex].Ccw = edgeIndex + 2; + 
mesh.Edges[edgeIndex].Cw = edgeIndex + 1; + mesh.Edges[edgeIndex].Face = faceIndex; + mesh.Edges[edgeIndex].Head = (j * width) + i + 1; + mesh.Edges[edgeIndex].Opp = edgeIndex - 2; + + mesh.Edges[edgeIndex + 1].Ccw = edgeIndex; + mesh.Edges[edgeIndex + 1].Cw = edgeIndex + 2; + mesh.Edges[edgeIndex + 1].Face = faceIndex; + mesh.Edges[edgeIndex + 1].Head = ((j + 1) * width) + i + 1; + if (i == width - 1) + { + mesh.Edges[edgeIndex].Opp = edgeOffset + (width - 1) + (height - 1) + j; + } + else + { + mesh.Edges[edgeIndex].Opp = edgeIndex + 4; + } + + mesh.Edges[edgeIndex + 2].Ccw = edgeIndex + 1; + mesh.Edges[edgeIndex + 2].Cw = edgeIndex; + mesh.Edges[edgeIndex + 2].Face = faceIndex; + mesh.Edges[edgeIndex + 2].Head = ((j + 1) * width) + i; + if (j == height - 1) + { + mesh.Edges[edgeIndex].Opp = edgeOffset + (width - 1) + (2 * (height - 1)) + i; + } + else + { + mesh.Edges[edgeIndex].Opp = edgeIndex + ((width - 1) * 6) - 5; + } + + mesh.Faces[faceIndex].Edge = edgeIndex; + edgeIndex += 3; + faceIndex++; + } + } + + // Link up outer edges... first top edges + int prevEdge = edgeOffset + width; + int edge = edgeOffset; + for (int i = 0; i < width - 1; i++) + { + mesh.Edges[edge].Cw = prevEdge; + mesh.Edges[edge].Ccw = edge + 1; + mesh.Edges[edge].Opp = i * 6; + mesh.Edges[edge].Face = -1; + mesh.Edges[edge].Head = i; + prevEdge = edge; + edge++; + } + + // next the left edges + prevEdge = edgeOffset; + for (int i = 0; i < height - 1; i++) + { + mesh.Edges[edge].Cw = edge + 1; + mesh.Edges[edge].Ccw = prevEdge; + mesh.Edges[edge].Opp = (i * (width - 1) * 6) + 2; + mesh.Edges[edge].Face = -1; + mesh.Edges[edge].Head = width * (i + 1); + prevEdge = edge; + } + + // next the right edges + prevEdge = edgeOffset + (width - 1); + for (int i = 0; i < height - 1; i++) + { + mesh.Edges[edge].Ccw = edge + 1; + mesh.Edges[edge].Cw = prevEdge; + mesh.Edges[edge].Opp = (i * (width - 1) * 6) - 2; + mesh.Edges[edge].Face = -1; + mesh.Edges[edge].Head = (i * width) - 1; + prevEdge = edge; + } + + // finally the bottom edges + prevEdge = edgeOffset + (width - 1) + (height - 1); + for (int i = 0; i < width - 1; i++) + { + mesh.Edges[edge].Cw = edge + 1; + mesh.Edges[edge].Ccw = prevEdge; + mesh.Edges[edge].Opp = ((height - 2) * (width - 1) * 6) + (i * 6) + 5; + mesh.Edges[edge].Face = -1; + mesh.Edges[edge].Head = ((height - 1) * width) + i; + prevEdge = edge; + } + + return mesh; + } + /// /// Mesh face. - /// - public struct Face - { - private int edge; // index of one edge on the face + /// + public struct Face + { + private int edge; // index of one edge on the face private bool valid; /// @@ -305,14 +301,14 @@ public struct Face /// Gets or sets a value indicating whether face is valid. /// public bool Valid { get => this.valid; set => this.valid = value; } - } - + } + /// /// Mesh vertex. - /// - public struct Vertex - { - private Point3D pos; + /// + public struct Vertex + { + private Point3D pos; private Point3D color; /// @@ -324,17 +320,17 @@ public struct Vertex /// Gets or sets vertex color. /// public Point3D Color { get => this.color; set => this.color = value; } - } - + } + /// /// Mesh edge. 
- /// - public struct HalfEdge - { - private int ccw; // index of edge moving counter-clockwise - private int cw; // index of edge moving clockwise - private int opp; // index of opposite edge - private int face; // index of face + /// + public struct HalfEdge + { + private int ccw; // index of edge moving counter-clockwise + private int cw; // index of edge moving clockwise + private int opp; // index of opposite edge + private int face; // index of face private int head; // index of head vertex /// @@ -361,6 +357,6 @@ public struct HalfEdge /// Gets or sets head. /// public int Head { get => this.head; set => this.head = value; } - } - } -} + } + } +} diff --git a/Sources/Media/Microsoft.Psi.Media.Windows.x64/MediaCapture.cs b/Sources/Media/Microsoft.Psi.Media.Windows.x64/MediaCapture.cs index b14574d75..461bcdfd7 100644 --- a/Sources/Media/Microsoft.Psi.Media.Windows.x64/MediaCapture.cs +++ b/Sources/Media/Microsoft.Psi.Media.Windows.x64/MediaCapture.cs @@ -256,11 +256,17 @@ public void Start(Action notifyCompletionTime) this.camera.CaptureSample((data, length, timestamp) => { - var time = DateTime.FromFileTimeUtc(timestamp); using var sharedImage = ImagePool.GetOrCreate(this.configuration.Width, this.configuration.Height, PixelFormat.BGR_24bpp); sharedImage.Resource.CopyFrom(data); var originatingTime = this.pipeline.GetCurrentTimeFromElapsedTicks(timestamp); + + // Ensure that originating times are strictly increasing + if (originatingTime <= this.Out.LastEnvelope.OriginatingTime) + { + originatingTime = this.Out.LastEnvelope.OriginatingTime.AddTicks(1); + } + this.Out.Post(sharedImage, originatingTime); }); } diff --git a/Sources/Media/Microsoft.Psi.Media_Interop.Windows.x64/AssemblyInfo.cpp b/Sources/Media/Microsoft.Psi.Media_Interop.Windows.x64/AssemblyInfo.cpp index e4a8df14d..94bcd67ec 100644 --- a/Sources/Media/Microsoft.Psi.Media_Interop.Windows.x64/AssemblyInfo.cpp +++ b/Sources/Media/Microsoft.Psi.Media_Interop.Windows.x64/AssemblyInfo.cpp @@ -15,6 +15,6 @@ using namespace System::Security::Permissions; [assembly:AssemblyCopyrightAttribute("Copyright (c) Microsoft Corporation. 
All rights reserved.")]; [assembly:ComVisible(false)]; [assembly:CLSCompliantAttribute(true)]; -[assembly:AssemblyVersionAttribute("0.15.49.1")]; -[assembly:AssemblyFileVersionAttribute("0.15.49.1")]; -[assembly:AssemblyInformationalVersionAttribute("0.15.49.1-beta")]; +[assembly:AssemblyVersionAttribute("0.16.92.1")]; +[assembly:AssemblyFileVersionAttribute("0.16.92.1")]; +[assembly:AssemblyInformationalVersionAttribute("0.16.92.1-beta")]; diff --git a/Sources/Media/Microsoft.Psi.Media_Interop.Windows.x64/AssemblyInfo.rc b/Sources/Media/Microsoft.Psi.Media_Interop.Windows.x64/AssemblyInfo.rc index eb2d748cc9c4bd3fec86a68ce2b471b1c6b7eb88..cf19cf33437f0e00f787b43b1a3bf6ae52d6be45 100644 GIT binary patch delta 40 scmeyM{y}|19tXD>gARiwgAtH4oNUNpJb4X=5W5+J9!PBSQ4Vcx0M}*-ZU6uP delta 40 scmeyM{y}|19tXE6gARiUgC&qOoNUNpJb4X=5W6XZ9!PBSQ4Vcx0M}*-Z~y=R diff --git a/Sources/Media/Microsoft.Psi.Media_Interop.Windows.x64/MP4Writer.cpp b/Sources/Media/Microsoft.Psi.Media_Interop.Windows.x64/MP4Writer.cpp index 64f73471c..e871a4d7e 100644 --- a/Sources/Media/Microsoft.Psi.Media_Interop.Windows.x64/MP4Writer.cpp +++ b/Sources/Media/Microsoft.Psi.Media_Interop.Windows.x64/MP4Writer.cpp @@ -283,12 +283,6 @@ namespace Microsoft { { switch (pixelFormat) { - case NativePixelFormat_Undefined: - case NativePixelFormat_Gray_8bpp: - case NativePixelFormat_Gray_16bpp: - case NativePixelFormat_RGBA_64bpp: - hr = E_NOTIMPL; - break; case NativePixelFormat_BGRA_32bpp: case NativePixelFormat_BGRX_32bpp: if (bufferLength != imageWidth * imageHeight * 4) @@ -301,6 +295,14 @@ namespace Microsoft { { hr = E_UNEXPECTED; } + break; + case NativePixelFormat_Undefined: + case NativePixelFormat_Gray_8bpp: + case NativePixelFormat_Gray_16bpp: + case NativePixelFormat_RGBA_64bpp: + default: + hr = E_NOTIMPL; + break; } IFS(CopyImageDataToMediaBuffer(imageData, pixelFormat, rawBuffer)); (void)buffer->Unlock(); diff --git a/Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.def b/Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.def new file mode 100644 index 000000000..24e7c1235 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.def @@ -0,0 +1,3 @@ +EXPORTS +DllCanUnloadNow = WINRT_CanUnloadNow PRIVATE +DllGetActivationFactory = WINRT_GetActivationFactory PRIVATE diff --git a/Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.vcxproj b/Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.vcxproj new file mode 100644 index 000000000..0a92bd96e --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.vcxproj @@ -0,0 +1,173 @@ + + + + + true + true + true + true + {F50194C0-9561-40C7-B9CB-B977E3B3D76D} + HoloLens2ResearchMode + HoloLens2ResearchMode + en-US + 14.0 + true + Windows Store + 10.0 + 10.0.19041.0 + 10.0.17134.0 + + + + + Debug + ARM + + + Debug + ARM64 + + + Debug + Win32 + + + Debug + x64 + + + Release + ARM + + + Release + ARM64 + + + Release + Win32 + + + Release + x64 + + + + DynamicLibrary + v141 + v142 + v143 + Unicode + false + + + true + true + + + false + true + false + + + + + + + + + + + + + + + + + Use + pch.h + $(IntDir)pch.pch + Level4 + %(AdditionalOptions) /bigobj + + /DWINRT_NO_MAKE_DETECTION %(AdditionalOptions) + + + _WINRT_DLL;WIN32_LEAN_AND_MEAN;WINRT_LEAN_AND_MEAN;%(PreprocessorDefinitions) + $(WindowsSDK_WindowsMetadata);$(AdditionalUsingDirectories) + + + Console + false + HoloLens2ResearchMode.def + + + + + _DEBUG;%(PreprocessorDefinitions) + + + + + 
NDEBUG;%(PreprocessorDefinitions) + + + true + true + + + + + + + + + + + + + ResearchModeSensorDevice.idl + + + + + + Create + + + + + + + + + ResearchModeSensorDevice.idl + + + + + + + + + + + + + + + false + + + + + + + + + This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. + + + + + \ No newline at end of file diff --git a/Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.vcxproj.filters b/Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.vcxproj.filters new file mode 100644 index 000000000..05407b600 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/HoloLens2ResearchMode.vcxproj.filters @@ -0,0 +1,48 @@ + + + + + accd3aa8-1ba0-4223-9bbe-0c431709210b + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tga;tiff;tif;png;wav;mfcribbon-ms + + + {926ab91d-31b4-48c3-b9a4-e681349f27f0} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Sources/MixedReality/HoloLens2ResearchMode/PropertySheet.props b/Sources/MixedReality/HoloLens2ResearchMode/PropertySheet.props new file mode 100644 index 000000000..e34141b01 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/PropertySheet.props @@ -0,0 +1,16 @@ + + + + + + + + \ No newline at end of file diff --git a/Sources/MixedReality/HoloLens2ResearchMode/Readme.md b/Sources/MixedReality/HoloLens2ResearchMode/Readme.md new file mode 100644 index 000000000..e32f0bbde --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/Readme.md @@ -0,0 +1,14 @@ +# C++/WinRT HoloLens2ResearchMode Project Overview + +This project wraps [HoloLens 2 Research Mode APIs](https://github.com/microsoft/HoloLens2ForCV/blob/main/Docs/ResearchMode-ApiDoc.pdf) in Windows Runtime +classes. The generated Windows Runtime component may then be consumed by a C# Universal Windows Platform (UWP) app. + +Note that this project includes the Research Mode header file [ResearchModeApi.h](./ResearchModeApi.h), which was copied directly from [the HoloLens 2 Research Mode samples repository](https://github.com/microsoft/HoloLens2ForCV). The original file is available [here](https://github.com/microsoft/HoloLens2ForCV/blob/5b0fa70a6e67997b6efe8a2ea1d41e06264aec3c/Samples/ResearchModeApi/ResearchModeApi.h). + +## References +[Dorin Ungureanu, Federica Bogo, Silvano Galliani, Pooja Sama, Xin Duan, Casey Meekhof, Jan Stühmer, Thomas J. Cashman, Bugra Tekin, Johannes L. Schönberger, Pawel Olszta, and Marc Pollefeys. HoloLens 2 Research Mode as a Tool for Computer Vision Research. arXiv:2008.11239, 2020.](https://arxiv.org/abs/2008.11239) + +[Research mode repository](https://github.com/microsoft/HoloLens2ForCV) + +[Research mode API documentation](https://github.com/microsoft/HoloLens2ForCV/blob/main/Docs/ResearchMode-ApiDoc.pdf) + diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeAccelFrame.cpp b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeAccelFrame.cpp new file mode 100644 index 000000000..4a34c9c8d --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeAccelFrame.cpp @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
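As a rough illustration of how the generated Windows Runtime component described in the README above might be consumed from a C# UWP app, the following sketch creates the sensor device, requests camera consent, and reads a single long-throw depth frame. This is a minimal sketch, not part of this patch: the class and method names are illustrative, the casts to `ResearchModeCameraSensor` and `ResearchModeSensorDepthFrame` assume the projection exposes the concrete runtime classes declared in `ResearchModeSensorDevice.idl` later in this patch, and app manifest/capability setup is omitted.

```csharp
// Illustrative consumption sketch (assumed C# UWP caller; not part of this patch).
using System;
using System.Diagnostics;
using System.Threading.Tasks;
using HoloLens2ResearchMode;

public static class ResearchModeUsageExample
{
    public static async Task ReadOneDepthFrameAsync()
    {
        var device = new ResearchModeSensorDevice();

        // Ask for camera consent before touching any camera sensor.
        ResearchModeSensorConsent consent = await device.RequestCameraAccessAsync();
        if (consent != ResearchModeSensorConsent.Allowed)
        {
            throw new UnauthorizedAccessException($"Research Mode camera access: {consent}");
        }

        // Open the long-throw depth sensor and read a single frame.
        var depthSensor = (ResearchModeCameraSensor)device.GetSensor(ResearchModeSensorType.DepthLongThrow);
        depthSensor.OpenStream();
        try
        {
            var frame = (ResearchModeSensorDepthFrame)depthSensor.GetNextBuffer();
            ResearchModeSensorResolution resolution = frame.GetResolution();
            ushort[] depthValues = frame.GetBuffer(); // one raw depth value per pixel
            Debug.WriteLine($"Depth frame: {resolution.Width}x{resolution.Height}, {depthValues.Length} samples.");
        }
        finally
        {
            depthSensor.CloseStream();
        }
    }
}
```

In practice, frame reads would typically run in a loop on a background task for the lifetime of the stream rather than fetching a single buffer as shown here.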
+ +#include "pch.h" +#include "ResearchModeAccelFrame.h" +#include "ResearchModeAccelFrame.g.cpp" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + ResearchModeAccelFrame::ResearchModeAccelFrame(::IResearchModeSensorFrame* pSensorFrame) + { + m_pSensorFrame.attach(pSensorFrame); + m_pAccelFrame = m_pSensorFrame.as<::IResearchModeAccelFrame>(); + } + + com_array ResearchModeAccelFrame::GetCalibratedAccelarationSamples() + { + const ::AccelDataStruct* pBuffer = nullptr; // note: this is the non-IDL-generated version from ResearchModeApi.h + size_t bufferLength = 0; + HRESULT hr = m_pAccelFrame->GetCalibratedAccelarationSamples(&pBuffer, &bufferLength); + winrt::check_hresult(hr); + const AccelDataStruct* pBuffer2 = (AccelDataStruct*)(pBuffer); // cast to IDL-generated version + return winrt::com_array(pBuffer2, pBuffer2 + bufferLength); + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorResolution ResearchModeAccelFrame::GetResolution() + { + ::ResearchModeSensorResolution resolution; + winrt::check_hresult(m_pSensorFrame->GetResolution(&resolution)); + return *(reinterpret_cast(&resolution)); + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorTimestamp ResearchModeAccelFrame::GetTimeStamp() + { + ::ResearchModeSensorTimestamp timestamp; + winrt::check_hresult(m_pSensorFrame->GetTimeStamp(×tamp)); + return *(reinterpret_cast(×tamp)); + } +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeAccelFrame.h b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeAccelFrame.h new file mode 100644 index 000000000..1948a6587 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeAccelFrame.h @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +#pragma once +#include "ResearchModeAccelFrame.g.h" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + struct ResearchModeAccelFrame : ResearchModeAccelFrameT + { + ResearchModeAccelFrame(::IResearchModeSensorFrame* pSensorFrame); + + com_array GetCalibratedAccelarationSamples(); + + winrt::HoloLens2ResearchMode::ResearchModeSensorResolution GetResolution(); + winrt::HoloLens2ResearchMode::ResearchModeSensorTimestamp GetTimeStamp(); + + winrt::com_ptr<::IResearchModeSensorFrame> m_pSensorFrame; + winrt::com_ptr<::IResearchModeAccelFrame> m_pAccelFrame; + }; +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeApi.h b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeApi.h new file mode 100644 index 000000000..ac36748d5 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeApi.h @@ -0,0 +1,236 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. 
+// +//********************************************************* + +#pragma once + +#include +#include + +#include +#include + +#include +#include +#include + +#include + +interface IResearchModeCameraSensor; +interface IResearchModeSensor; +interface IResearchModeSensorFrame; + +struct AccelDataStruct +{ + uint64_t VinylHupTicks; + uint64_t SocTicks; + float AccelValues[3]; + float temperature; +}; + +struct GyroDataStruct +{ + uint64_t VinylHupTicks; + uint64_t SocTicks; + float GyroValues[3]; + float temperature; +}; + +struct MagDataStruct +{ + uint64_t VinylHupTicks; + uint64_t SocTicks; + float MagValues[3]; +}; + +enum ResearchModeSensorType +{ + LEFT_FRONT, + LEFT_LEFT, + RIGHT_FRONT, + RIGHT_RIGHT, + DEPTH_AHAT, + DEPTH_LONG_THROW, + IMU_ACCEL, + IMU_GYRO, + IMU_MAG +}; + +struct ResearchModeSensorDescriptor +{ + LUID sensorId; + ResearchModeSensorType sensorType; +}; + +enum ResearchModeSensorTimestampSource +{ + SensorTimestampSource_USB_SOF = 0, + SensorTimestampSource_Unknown = 1, + SensorTimestampSource_CenterOfExposure = 2, + SensorTimestampSource_Count = 3 +}; + +struct ResearchModeSensorTimestamp { + ResearchModeSensorTimestampSource Source; + UINT64 SensorTicks; + UINT64 SensorTicksPerSecond; + UINT64 HostTicks; + UINT64 HostTicksPerSecond; +}; + +struct ResearchModeSensorResolution { + UINT32 Width; + UINT32 Height; + UINT32 Stride; + UINT32 BitsPerPixel; + UINT32 BytesPerPixel; +}; + +enum ResearchModeSensorConsent { + DeniedBySystem = 0, + NotDeclaredByApp = 1, + DeniedByUser = 2, + UserPromptRequired = 3, + Allowed = 4 +}; + +DECLARE_INTERFACE_IID_(IResearchModeSensorDevice, IUnknown, "65E8CC3C-3A03-4006-AE0D-34E1150058CC") +{ + STDMETHOD(DisableEyeSelection()) = 0; + STDMETHOD(EnableEyeSelection()) = 0; + + STDMETHOD(GetSensorCount( + _Out_ size_t *pOutCount)) = 0; + STDMETHOD(GetSensorDescriptors( + _Out_writes_(sensorCount) ResearchModeSensorDescriptor *pSensorDescriptorData, + size_t sensorCount, + _Out_ size_t *pOutCount)) = 0; + STDMETHOD(GetSensor( + ResearchModeSensorType sensorType, + _Outptr_result_nullonfailure_ IResearchModeSensor **ppSensor)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeSensorDevicePerception, IUnknown, "C1678F4B-ECB4-47A8-B6FA-97DBF4417DB2") +{ + STDMETHOD(GetRigNodeId( + _Outptr_ GUID *pRigNodeId)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeSensorDeviceConsent, IUnknown, "EAB9D672-9A88-4E43-8A69-9BA8f23A4C76") +{ + STDMETHOD_(HRESULT, RequestCamAccessAsync)(void (*camCallback)(ResearchModeSensorConsent))= 0; + STDMETHOD_(HRESULT, RequestIMUAccessAsync)(void (*imuCallback)(ResearchModeSensorConsent)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeSensor, IUnknown, "4D4D1D4B-9FDD-4001-BA1E-F8FAB1DA14D0") +{ + STDMETHOD(OpenStream()) = 0; + STDMETHOD(CloseStream()) = 0; + STDMETHOD_(LPCWSTR, GetFriendlyName)() = 0; + STDMETHOD_(ResearchModeSensorType, GetSensorType)() = 0; + + STDMETHOD(GetSampleBufferSize( + _Out_ size_t *pSampleBufferSize)) = 0; + STDMETHOD(GetNextBuffer( + _Outptr_result_nullonfailure_ IResearchModeSensorFrame **ppSensorFrame)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeCameraSensor, IUnknown, "3BDB4977-960B-4F5D-8CA3-D21E68F26E76") +{ + STDMETHOD(MapImagePointToCameraUnitPlane( + float (&uv) [2], + float (&xy) [2])) = 0; + STDMETHOD(MapCameraSpaceToImagePoint( + float(&xy)[2], + float(&uv)[2])) = 0; + STDMETHOD(GetCameraExtrinsicsMatrix(DirectX::XMFLOAT4X4 *pCameraViewMatrix)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeAccelSensor, IUnknown, "627A7FAA-55EA-4951-B370-26186395AAB5") +{ + 
STDMETHOD(GetExtrinsicsMatrix(DirectX::XMFLOAT4X4 *pAccel)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeGyroSensor, IUnknown, "E6E8B36F-E6E7-494C-B4A8-7CFA2561BEE7") +{ + STDMETHOD(GetExtrinsicsMatrix(DirectX::XMFLOAT4X4 *pGyro)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeMagSensor, IUnknown, "CB082E34-1C69-445D-A91A-43CE96B3655E") +{ +}; + +DECLARE_INTERFACE_IID_(IResearchModeDepthSensor, IUnknown, "CC317D10-C26E-45B2-B91B-0E4571486CEC") +{ +}; + +DECLARE_INTERFACE_IID_(IResearchModeSensorFrame, IUnknown, "73479614-89C9-4FFD-9C16-615BC32C6A09") +{ + STDMETHOD(GetResolution( + _Out_ ResearchModeSensorResolution *pResolution)) = 0; + // For frames with batched samples this returns the time stamp for the first sample in the frame. + STDMETHOD(GetTimeStamp( + _Out_ ResearchModeSensorTimestamp *pTimeStamp)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeSensorVLCFrame, IUnknown, "5C693123-3851-4FDC-A2D9-51C68AF53976") +{ + STDMETHOD(GetBuffer( + _Outptr_ const BYTE **ppBytes, + _Out_ size_t *pBufferOutLength)) = 0; + STDMETHOD(GetGain( + _Out_ UINT32 *pGain)) = 0; + STDMETHOD(GetExposure( + _Out_ UINT64 *pExposure)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeSensorDepthFrame, IUnknown, "35167E38-E020-43D9-898E-6CB917AD86D3") +{ + STDMETHOD(GetBuffer( + _Outptr_ const UINT16 **ppBytes, + _Out_ size_t *pBufferOutLength)) = 0; + STDMETHOD(GetAbDepthBuffer( + _Outptr_ const UINT16 **ppBytes, + _Out_ size_t *pBufferOutLength)) = 0; + STDMETHOD(GetSigmaBuffer( + _Outptr_ const BYTE **ppBytes, + _Out_ size_t *pBufferOutLength)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeAccelFrame, IUnknown, "42AA75F8-E3FE-4C25-88C6-F2ECE1E8A2C5") +{ + STDMETHOD(GetCalibratedAccelaration( + _Out_ DirectX::XMFLOAT3 *pAccel)) = 0; + STDMETHOD(GetCalibratedAccelarationSamples( + _Outptr_ const AccelDataStruct **ppAccelBuffer, + _Out_ size_t *pBufferOutLength)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeGyroFrame, IUnknown, "4C0C5EE7-CBB8-4A15-A81F-943785F524A6") +{ + STDMETHOD(GetCalibratedGyro( + _Out_ DirectX::XMFLOAT3 *pGyro)) = 0; + STDMETHOD(GetCalibratedGyroSamples( + _Outptr_ const GyroDataStruct **ppAccelBuffer, + _Out_ size_t *pBufferOutLength)) = 0; +}; + +DECLARE_INTERFACE_IID_(IResearchModeMagFrame, IUnknown, "2376C9D2-7F3D-456E-A39E-3B7730DDA9E5") +{ + STDMETHOD(GetMagnetometer( + _Out_ DirectX::XMFLOAT3 *pMag)) = 0; + STDMETHOD(GetMagnetometerSamples( + _Outptr_ const MagDataStruct **ppMagBuffer, + _Out_ size_t *pBufferOutLength)) = 0; +}; + +HRESULT CreateResearchModeSensorDevice( + _Outptr_result_nullonfailure_ IResearchModeSensorDevice **ppSensorDevice); + diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeCameraSensor.cpp b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeCameraSensor.cpp new file mode 100644 index 000000000..4af1e5393 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeCameraSensor.cpp @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
+ +#include "pch.h" +#include "ResearchModeCameraSensor.h" +#include "ResearchModeCameraSensor.g.cpp" +#include "ResearchModeSensorDepthFrame.h" +#include "ResearchModeSensorVlcFrame.h" + +using namespace winrt::Windows::Foundation; +using namespace winrt::Windows::Foundation::Numerics; + +namespace winrt::HoloLens2ResearchMode::implementation +{ + ResearchModeCameraSensor::ResearchModeCameraSensor(::IResearchModeSensor* pSensor) + { + m_pSensor.attach(pSensor); + m_pCameraSensor = m_pSensor.as<::IResearchModeCameraSensor>(); + m_sensorType = static_cast(m_pSensor->GetSensorType()); + } + + int32_t ResearchModeCameraSensor::MapImagePointToCameraUnitPlane(winrt::Windows::Foundation::Point const& uv, winrt::Windows::Foundation::Point& xy) noexcept + { + float xyVal[2]; + HRESULT hr = m_pCameraSensor->MapImagePointToCameraUnitPlane(*reinterpret_cast(&const_cast(uv)), xyVal); + xy = Point(xyVal[0], xyVal[1]); + return hr; + } + + int32_t ResearchModeCameraSensor::MapCameraSpaceToImagePoint(winrt::Windows::Foundation::Point const& xy, winrt::Windows::Foundation::Point& uv) noexcept + { + float uvVal[2]; + HRESULT hr = m_pCameraSensor->MapCameraSpaceToImagePoint(*reinterpret_cast(&const_cast(xy)), uvVal); + uv = Point(uvVal[0], uvVal[1]); + return hr; + } + + winrt::Windows::Foundation::Numerics::float4x4 ResearchModeCameraSensor::GetCameraExtrinsicsMatrix() + { + float4x4 cameraViewMatrix; + HRESULT hr = m_pCameraSensor->GetCameraExtrinsicsMatrix(reinterpret_cast(&cameraViewMatrix)); + winrt::check_hresult(hr); + return cameraViewMatrix; + } + + void ResearchModeCameraSensor::OpenStream() + { + winrt::check_hresult(m_pSensor->OpenStream()); + } + + void ResearchModeCameraSensor::CloseStream() + { + winrt::check_hresult(m_pSensor->CloseStream()); + } + + hstring ResearchModeCameraSensor::GetFriendlyName() + { + return m_pSensor->GetFriendlyName(); + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorType ResearchModeCameraSensor::GetSensorType() + { + return m_sensorType; + } + + winrt::HoloLens2ResearchMode::IResearchModeSensorFrame ResearchModeCameraSensor::GetNextBuffer() + { + ::IResearchModeSensorFrame* pSensorFrame = nullptr; + HRESULT hr = m_pSensor->GetNextBuffer(&pSensorFrame); + winrt::check_hresult(hr); + + switch (m_sensorType) + { + case ResearchModeSensorType::DepthAhat: + case ResearchModeSensorType::DepthLongThrow: + return winrt::make(pSensorFrame); + + case ResearchModeSensorType::LeftFront: + case ResearchModeSensorType::LeftLeft: + case ResearchModeSensorType::RightFront: + case ResearchModeSensorType::RightRight: + return winrt::make(pSensorFrame); + + default: + return nullptr; + } + } +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeCameraSensor.h b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeCameraSensor.h new file mode 100644 index 000000000..f9bc1d688 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeCameraSensor.h @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
+ +#pragma once +#include "ResearchModeCameraSensor.g.h" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + struct ResearchModeCameraSensor : ResearchModeCameraSensorT + { + // Implementation-only constructor + ResearchModeCameraSensor(::IResearchModeSensor* pSensor); + + int32_t MapImagePointToCameraUnitPlane(winrt::Windows::Foundation::Point const& uv, winrt::Windows::Foundation::Point& xy) noexcept; + int32_t MapCameraSpaceToImagePoint(winrt::Windows::Foundation::Point const& xy, winrt::Windows::Foundation::Point& uv) noexcept; + winrt::Windows::Foundation::Numerics::float4x4 GetCameraExtrinsicsMatrix(); + void OpenStream(); + void CloseStream(); + hstring GetFriendlyName(); + winrt::HoloLens2ResearchMode::ResearchModeSensorType GetSensorType(); + winrt::HoloLens2ResearchMode::IResearchModeSensorFrame GetNextBuffer(); + + winrt::com_ptr<::IResearchModeSensor> m_pSensor; + winrt::com_ptr<::IResearchModeCameraSensor> m_pCameraSensor; + ResearchModeSensorType m_sensorType; + }; +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeGyroFrame.cpp b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeGyroFrame.cpp new file mode 100644 index 000000000..265a39ff2 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeGyroFrame.cpp @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +#include "pch.h" +#include "ResearchModeGyroFrame.h" +#include "ResearchModeGyroFrame.g.cpp" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + ResearchModeGyroFrame::ResearchModeGyroFrame(::IResearchModeSensorFrame* pSensorFrame) + { + m_pSensorFrame.attach(pSensorFrame); + m_pGyroFrame = m_pSensorFrame.as<::IResearchModeGyroFrame>(); + } + + com_array ResearchModeGyroFrame::GetCalibratedGyroSamples() + { + const ::GyroDataStruct* pBuffer = nullptr; // note: this is the non-IDL-generated version from ResearchModeApi.h + size_t bufferLength = 0; + HRESULT hr = m_pGyroFrame->GetCalibratedGyroSamples(&pBuffer, &bufferLength); + winrt::check_hresult(hr); + const GyroDataStruct* pBuffer2 = (GyroDataStruct*)(pBuffer); // cast to IDL-generated version + return winrt::com_array(pBuffer2, pBuffer2 + bufferLength); + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorResolution ResearchModeGyroFrame::GetResolution() + { + ::ResearchModeSensorResolution resolution; + winrt::check_hresult(m_pSensorFrame->GetResolution(&resolution)); + return *(reinterpret_cast(&resolution)); + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorTimestamp ResearchModeGyroFrame::GetTimeStamp() + { + ::ResearchModeSensorTimestamp timestamp; + winrt::check_hresult(m_pSensorFrame->GetTimeStamp(×tamp)); + return *(reinterpret_cast(×tamp)); + } +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeGyroFrame.h b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeGyroFrame.h new file mode 100644 index 000000000..e4f352f8e --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeGyroFrame.h @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
+ +#pragma once +#include "ResearchModeGyroFrame.g.h" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + struct ResearchModeGyroFrame : ResearchModeGyroFrameT + { + ResearchModeGyroFrame(::IResearchModeSensorFrame* pSensorFrame); + + com_array GetCalibratedGyroSamples(); + + winrt::HoloLens2ResearchMode::ResearchModeSensorResolution GetResolution(); + winrt::HoloLens2ResearchMode::ResearchModeSensorTimestamp GetTimeStamp(); + + winrt::com_ptr<::IResearchModeSensorFrame> m_pSensorFrame; + winrt::com_ptr<::IResearchModeGyroFrame> m_pGyroFrame; + }; +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeImuSensor.cpp b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeImuSensor.cpp new file mode 100644 index 000000000..3877b9f83 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeImuSensor.cpp @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +#include "pch.h" +#include "ResearchModeImuSensor.h" +#include "ResearchModeImuSensor.g.cpp" +#include "ResearchModeAccelFrame.h" +#include "ResearchModeGyroFrame.h" +#include "ResearchModeMagFrame.h" + +using namespace winrt::Windows::Foundation; +using namespace winrt::Windows::Foundation::Numerics; + +namespace winrt::HoloLens2ResearchMode::implementation +{ + ResearchModeImuSensor::ResearchModeImuSensor(::IResearchModeSensor* pSensor) + { + m_pSensor.attach(pSensor); + m_sensorType = static_cast(m_pSensor->GetSensorType()); + } + + void ResearchModeImuSensor::OpenStream() + { + winrt::check_hresult(m_pSensor->OpenStream()); + } + + void ResearchModeImuSensor::CloseStream() + { + winrt::check_hresult(m_pSensor->CloseStream()); + } + + hstring ResearchModeImuSensor::GetFriendlyName() + { + return m_pSensor->GetFriendlyName(); + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorType ResearchModeImuSensor::GetSensorType() + { + return m_sensorType; + } + + winrt::HoloLens2ResearchMode::IResearchModeSensorFrame ResearchModeImuSensor::GetNextBuffer() + { + ::IResearchModeSensorFrame* pSensorFrame = nullptr; + HRESULT hr = m_pSensor->GetNextBuffer(&pSensorFrame); + winrt::check_hresult(hr); + + switch (m_sensorType) + { + case ResearchModeSensorType::ImuAccel: + return winrt::make(pSensorFrame); + case ResearchModeSensorType::ImuGyro: + return winrt::make(pSensorFrame); + case ResearchModeSensorType::ImuMag: + return winrt::make(pSensorFrame); + default: + return nullptr; + } + } +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeImuSensor.h b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeImuSensor.h new file mode 100644 index 000000000..8da8f5d32 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeImuSensor.h @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
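The IMU sensor wrapper above hands back `ResearchModeAccelFrame`, `ResearchModeGyroFrame`, or `ResearchModeMagFrame` objects from `GetNextBuffer` depending on the sensor type, each carrying a batch of samples reinterpreted from the native `ResearchModeApi.h` structs. The sketch below reads one batch of calibrated gyro samples from C#; it is illustrative only, assumes IMU consent has already been granted via `RequestIMUAccessAsync`, and makes no claim about sample units.

```csharp
// Illustrative sketch (not part of this patch): read one batch of calibrated gyro samples.
using System.Diagnostics;
using HoloLens2ResearchMode;

public static class ImuExample
{
    public static void ReadGyroBatch(ResearchModeSensorDevice device)
    {
        // Assumes RequestIMUAccessAsync has already returned Allowed.
        IResearchModeSensor gyroSensor = device.GetSensor(ResearchModeSensorType.ImuGyro);
        gyroSensor.OpenStream();
        try
        {
            var frame = (ResearchModeGyroFrame)gyroSensor.GetNextBuffer();
            ResearchModeSensorTimestamp timestamp = frame.GetTimeStamp(); // timestamp of the first sample in the batch
            GyroDataStruct[] samples = frame.GetCalibratedGyroSamples();
            foreach (var sample in samples)
            {
                Debug.WriteLine($"gyro sample ({sample.X}, {sample.Y}, {sample.Z}) at SoC ticks {sample.SocTicks}");
            }
        }
        finally
        {
            gyroSensor.CloseStream();
        }
    }
}
```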
+ +#pragma once +#include "ResearchModeImuSensor.g.h" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + struct ResearchModeImuSensor : ResearchModeImuSensorT + { + // Implementation-only constructor + ResearchModeImuSensor(::IResearchModeSensor* pSensor); + + void OpenStream(); + void CloseStream(); + hstring GetFriendlyName(); + winrt::HoloLens2ResearchMode::ResearchModeSensorType GetSensorType(); + winrt::HoloLens2ResearchMode::IResearchModeSensorFrame GetNextBuffer(); + + winrt::com_ptr<::IResearchModeSensor> m_pSensor; + ResearchModeSensorType m_sensorType; + }; +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeMagFrame.cpp b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeMagFrame.cpp new file mode 100644 index 000000000..c85fe9c76 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeMagFrame.cpp @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +#include "pch.h" +#include "ResearchModeMagFrame.h" +#include "ResearchModeMagFrame.g.cpp" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + ResearchModeMagFrame::ResearchModeMagFrame(::IResearchModeSensorFrame* pSensorFrame) + { + m_pSensorFrame.attach(pSensorFrame); + m_pMagFrame = m_pSensorFrame.as<::IResearchModeMagFrame>(); + } + + com_array ResearchModeMagFrame::GetMagnetometerSamples() + { + const ::MagDataStruct* pBuffer = nullptr; // note: this is the non-IDL-generated version from ResearchModeApi.h + size_t bufferLength = 0; + HRESULT hr = m_pMagFrame->GetMagnetometerSamples(&pBuffer, &bufferLength); + winrt::check_hresult(hr); + const MagDataStruct* pBuffer2 = (MagDataStruct*)(pBuffer); // cast to IDL-generated version + return winrt::com_array(pBuffer2, pBuffer2 + bufferLength); + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorResolution ResearchModeMagFrame::GetResolution() + { + ::ResearchModeSensorResolution resolution; + winrt::check_hresult(m_pSensorFrame->GetResolution(&resolution)); + return *(reinterpret_cast(&resolution)); + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorTimestamp ResearchModeMagFrame::GetTimeStamp() + { + ::ResearchModeSensorTimestamp timestamp; + winrt::check_hresult(m_pSensorFrame->GetTimeStamp(×tamp)); + return *(reinterpret_cast(×tamp)); + } +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeMagFrame.h b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeMagFrame.h new file mode 100644 index 000000000..2c4ba64da --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeMagFrame.h @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
+ +#pragma once +#include "ResearchModeMagFrame.g.h" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + struct ResearchModeMagFrame : ResearchModeMagFrameT + { + ResearchModeMagFrame(::IResearchModeSensorFrame* pSensorFrame); + + com_array GetMagnetometerSamples(); + + winrt::HoloLens2ResearchMode::ResearchModeSensorResolution GetResolution(); + winrt::HoloLens2ResearchMode::ResearchModeSensorTimestamp GetTimeStamp(); + + winrt::com_ptr<::IResearchModeSensorFrame> m_pSensorFrame; + winrt::com_ptr<::IResearchModeMagFrame> m_pMagFrame; + }; +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDepthFrame.cpp b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDepthFrame.cpp new file mode 100644 index 000000000..32ea20018 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDepthFrame.cpp @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +#include "pch.h" +#include "ResearchModeSensorDepthFrame.h" +#include "ResearchModeSensorDepthFrame.g.cpp" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + ResearchModeSensorDepthFrame::ResearchModeSensorDepthFrame(::IResearchModeSensorFrame* pSensorFrame) + { + m_pSensorFrame.attach(pSensorFrame); + m_pDepthFrame = m_pSensorFrame.as<::IResearchModeSensorDepthFrame>(); + } + + com_array ResearchModeSensorDepthFrame::GetBuffer() + { + const UINT16* pBuffer = nullptr; + size_t bufferLength = 0; + + HRESULT hr = m_pDepthFrame->GetBuffer(&pBuffer, &bufferLength); + winrt::check_hresult(hr); + return winrt::com_array(pBuffer, pBuffer + bufferLength); + } + + com_array ResearchModeSensorDepthFrame::GetAbDepthBuffer() + { + const UINT16* pBuffer = nullptr; + size_t bufferLength = 0; + + HRESULT hr = m_pDepthFrame->GetAbDepthBuffer(&pBuffer, &bufferLength); + winrt::check_hresult(hr); + return winrt::com_array(pBuffer, pBuffer + bufferLength); + } + + com_array ResearchModeSensorDepthFrame::GetSigmaBuffer() + { + const BYTE* pBuffer = nullptr; + size_t bufferLength = 0; + + HRESULT hr = m_pDepthFrame->GetSigmaBuffer(&pBuffer, &bufferLength); + winrt::check_hresult(hr); + return winrt::com_array(pBuffer, pBuffer + bufferLength); + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorResolution ResearchModeSensorDepthFrame::GetResolution() + { + ::ResearchModeSensorResolution resolution; + winrt::check_hresult(m_pSensorFrame->GetResolution(&resolution)); + return *(reinterpret_cast(&resolution)); + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorTimestamp ResearchModeSensorDepthFrame::GetTimeStamp() + { + ::ResearchModeSensorTimestamp timestamp; + winrt::check_hresult(m_pSensorFrame->GetTimeStamp(×tamp)); + return *(reinterpret_cast(×tamp)); + } +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDepthFrame.h b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDepthFrame.h new file mode 100644 index 000000000..3f7c2f29e --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDepthFrame.h @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
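`ResearchModeSensorDepthFrame` above copies out three parallel per-pixel buffers: depth, active brightness, and sigma. The following sketch indexes them with the reported resolution; it is illustrative, the center-pixel choice is arbitrary, and the interpretation of the raw values (units, validity encoding in sigma) is left to the Research Mode API documentation referenced in the README.

```csharp
// Illustrative sketch (not part of this patch): inspect the center pixel of a depth frame.
using System.Diagnostics;
using HoloLens2ResearchMode;

public static class DepthFrameExample
{
    public static void InspectCenterPixel(ResearchModeSensorDepthFrame frame)
    {
        ResearchModeSensorResolution resolution = frame.GetResolution();
        ushort[] depth = frame.GetBuffer();                    // raw depth values, one per pixel
        ushort[] activeBrightness = frame.GetAbDepthBuffer();  // active-brightness (IR) image
        byte[] sigma = frame.GetSigmaBuffer();                 // per-pixel confidence/validity (long throw; AHAT frames may not provide one)

        int index = ((int)resolution.Height / 2 * (int)resolution.Width) + ((int)resolution.Width / 2);
        Debug.WriteLine($"center pixel: depth={depth[index]}, ab={activeBrightness[index]}, sigma={sigma[index]}");
    }
}
```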
+ +#pragma once +#include "ResearchModeSensorDepthFrame.g.h" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + struct ResearchModeSensorDepthFrame : ResearchModeSensorDepthFrameT + { + ResearchModeSensorDepthFrame(::IResearchModeSensorFrame* pSensorFrame); + + com_array GetBuffer(); + com_array GetAbDepthBuffer(); + com_array GetSigmaBuffer(); + winrt::HoloLens2ResearchMode::ResearchModeSensorResolution GetResolution(); + winrt::HoloLens2ResearchMode::ResearchModeSensorTimestamp GetTimeStamp(); + + winrt::com_ptr<::IResearchModeSensorFrame> m_pSensorFrame; + winrt::com_ptr<::IResearchModeSensorDepthFrame> m_pDepthFrame; + }; +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.cpp b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.cpp new file mode 100644 index 000000000..d33af8d10 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.cpp @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +#include "pch.h" +#include +#include +#include "ResearchModeSensorDevice.h" +#include "ResearchModeSensorDevice.g.cpp" +#include "ResearchModeCameraSensor.h" +#include "ResearchModeImuSensor.h" +#include "ResearchModeApi.h" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + extern "C" + HMODULE LoadLibraryA( + LPCSTR lpLibFileName + ); + + // The following statics are to support the callback from IResearchModeSensorDeviceConsent::RequestCam/IMUAccessAsync() + static ResearchModeSensorConsent camAccessCheck = ResearchModeSensorConsent::DeniedBySystem; + static ResearchModeSensorConsent imuAccessCheck = ResearchModeSensorConsent::DeniedBySystem; + static HANDLE camConsentGiven = CreateEvent(nullptr, true, false, nullptr); + static HANDLE imuConsentGiven = CreateEvent(nullptr, true, false, nullptr); + + ResearchModeSensorDevice::ResearchModeSensorDevice() + { + // Load Research Mode library + HMODULE hrResearchMode = LoadLibraryA("ResearchModeAPI"); + winrt::check_pointer(hrResearchMode); + + typedef HRESULT(__cdecl* PFN_CREATEPROVIDER) (::IResearchModeSensorDevice** ppSensorDevice); + + PFN_CREATEPROVIDER pfnCreate = reinterpret_cast(GetProcAddress(hrResearchMode, "CreateResearchModeSensorDevice")); + winrt::check_pointer(pfnCreate); + + HRESULT hr = pfnCreate(m_pSensorDevice.put()); + winrt::check_hresult(hr); + + m_pSensorDeviceConsent = m_pSensorDevice.as<::IResearchModeSensorDeviceConsent>(); + + winrt::check_pointer(camConsentGiven); + } + + int32_t ResearchModeSensorDevice::GetSensorCount() + { + size_t sensorCount = 0; + HRESULT hr = m_pSensorDevice->GetSensorCount(&sensorCount); + winrt::check_hresult(hr); + return static_cast(sensorCount); + } + + void ResearchModeSensorDevice::DisableEyeSelection() + { + winrt::check_hresult(m_pSensorDevice->DisableEyeSelection()); + } + + void ResearchModeSensorDevice::EnableEyeSelection() + { + winrt::check_hresult(m_pSensorDevice->EnableEyeSelection()); + } + + winrt::com_array ResearchModeSensorDevice::GetSensorDescriptors() + { + size_t sensorCount = 0; + + HRESULT hr = m_pSensorDevice->GetSensorCount(&sensorCount); + winrt::check_hresult(hr); + + std::vector sensorDescriptors; + sensorDescriptors.resize(sensorCount); + + hr = m_pSensorDevice->GetSensorDescriptors(reinterpret_cast<::ResearchModeSensorDescriptor*>(sensorDescriptors.data()), sensorDescriptors.size(), &sensorCount); + winrt::check_hresult(hr); + + return winrt::com_array(sensorDescriptors); + } + + 
winrt::HoloLens2ResearchMode::IResearchModeSensor ResearchModeSensorDevice::GetSensor(winrt::HoloLens2ResearchMode::ResearchModeSensorType const& sensorType) + { + ::IResearchModeSensor* pSensor = nullptr; + + HRESULT hr = m_pSensorDevice->GetSensor((::ResearchModeSensorType)sensorType, &pSensor); + winrt::check_hresult(hr); + + switch (sensorType) + { + case ResearchModeSensorType::LeftFront: + case ResearchModeSensorType::LeftLeft: + case ResearchModeSensorType::RightFront: + case ResearchModeSensorType::RightRight: + case ResearchModeSensorType::DepthAhat: + case ResearchModeSensorType::DepthLongThrow: + return winrt::make(pSensor); + break; + + case ResearchModeSensorType::ImuAccel: + case ResearchModeSensorType::ImuGyro: + case ResearchModeSensorType::ImuMag: + return winrt::make(pSensor); + + default: + throw winrt::hresult_invalid_argument(); + } + } + + winrt::guid ResearchModeSensorDevice::GetRigNodeId() + { + GUID rigNodeGuid; + auto pSensorDevicePerception = m_pSensorDevice.as<::IResearchModeSensorDevicePerception>(); + HRESULT hr = pSensorDevicePerception->GetRigNodeId(&rigNodeGuid); + winrt::check_hresult(hr); + return rigNodeGuid; + } + + winrt::Windows::Foundation::IAsyncOperation ResearchModeSensorDevice::RequestCameraAccessAsync() + { + // Check if consent already obtained + if (WaitForSingleObject(camConsentGiven, 0) == WAIT_OBJECT_0) + { + co_return camAccessCheck; + } + + winrt::check_hresult(m_pSensorDeviceConsent->RequestCamAccessAsync( + [](::ResearchModeSensorConsent consent) + { + camAccessCheck = static_cast(consent); + SetEvent(camConsentGiven); + })); + + // Return control to the caller and wait for the result + co_await winrt::resume_background(); + + if (WaitForSingleObject(camConsentGiven, INFINITE) != WAIT_OBJECT_0) + { + winrt::throw_last_error(); + } + + co_return camAccessCheck; + } + + winrt::Windows::Foundation::IAsyncOperation ResearchModeSensorDevice::RequestIMUAccessAsync() + { + // Check if consent already obtained + if (WaitForSingleObject(imuConsentGiven, 0) == WAIT_OBJECT_0) + { + co_return imuAccessCheck; + } + + winrt::check_hresult(m_pSensorDeviceConsent->RequestIMUAccessAsync( + [](::ResearchModeSensorConsent consent) + { + imuAccessCheck = static_cast(consent); + SetEvent(imuConsentGiven); + })); + + // Return control to the caller and wait for the result + co_await winrt::resume_background(); + + if (WaitForSingleObject(imuConsentGiven, INFINITE) != WAIT_OBJECT_0) + { + winrt::throw_last_error(); + } + + co_return imuAccessCheck; + } +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.h b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.h new file mode 100644 index 000000000..7c7da9209 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.h @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
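The device wrapper above turns the callback-based consent API (`RequestCamAccessAsync`/`RequestIMUAccessAsync` plus the static consent events) into awaitable operations. A hedged sketch of gating on those results from C# follows; the helper name is illustrative, and the split between camera and IMU consent simply mirrors the two native calls.

```csharp
// Illustrative sketch (not part of this patch): request consent before handing out the device.
using System;
using System.Diagnostics;
using System.Threading.Tasks;
using HoloLens2ResearchMode;

public static class ConsentExample
{
    public static async Task<ResearchModeSensorDevice> CreateDeviceWithConsentAsync(bool needImu)
    {
        var device = new ResearchModeSensorDevice();

        // Camera consent gates the depth and visible-light camera sensors.
        if (await device.RequestCameraAccessAsync() != ResearchModeSensorConsent.Allowed)
        {
            throw new UnauthorizedAccessException("Research Mode camera access was not granted.");
        }

        // IMU consent is only needed for the accelerometer, gyroscope, and magnetometer.
        if (needImu && await device.RequestIMUAccessAsync() != ResearchModeSensorConsent.Allowed)
        {
            throw new UnauthorizedAccessException("Research Mode IMU access was not granted.");
        }

        Debug.WriteLine($"Device exposes {device.GetSensorCount()} sensors.");
        return device;
    }
}
```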
+ +#pragma once +#include "ResearchModeSensorDevice.g.h" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + struct ResearchModeSensorDevice : ResearchModeSensorDeviceT + { + ResearchModeSensorDevice(); + + int32_t GetSensorCount(); + void DisableEyeSelection(); + void EnableEyeSelection(); + winrt::com_array GetSensorDescriptors(); + winrt::HoloLens2ResearchMode::IResearchModeSensor GetSensor(winrt::HoloLens2ResearchMode::ResearchModeSensorType const& sensorType); + winrt::guid GetRigNodeId(); + winrt::Windows::Foundation::IAsyncOperation RequestCameraAccessAsync(); + winrt::Windows::Foundation::IAsyncOperation RequestIMUAccessAsync(); + + winrt::com_ptr<::IResearchModeSensorDevice> m_pSensorDevice; + winrt::com_ptr<::IResearchModeSensorDeviceConsent> m_pSensorDeviceConsent; + }; +} + +namespace winrt::HoloLens2ResearchMode::factory_implementation +{ + struct ResearchModeSensorDevice : ResearchModeSensorDeviceT + { + }; +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.idl b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.idl new file mode 100644 index 000000000..09ad9a71d --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorDevice.idl @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace HoloLens2ResearchMode +{ + enum ResearchModeSensorType + { + LeftFront, + LeftLeft, + RightFront, + RightRight, + DepthAhat, + DepthLongThrow, + ImuAccel, + ImuGyro, + ImuMag, + }; + + struct Luid { + UInt32 LowPart; + Int32 HighPart; + }; + + struct ResearchModeSensorDescriptor + { + Luid sensorId; + ResearchModeSensorType sensorType; + }; + + enum ResearchModeSensorTimestampSource + { + UsbSof = 0, + Unknown = 1, + CenterOfExposure = 2, + Count = 3, + }; + + struct ResearchModeSensorTimestamp { + ResearchModeSensorTimestampSource Source; + UInt64 SensorTicks; + UInt64 SensorTicksPerSecond; + UInt64 HostTicks; + UInt64 HostTicksPerSecond; + }; + + struct ResearchModeSensorResolution + { + UInt32 Width; + UInt32 Height; + UInt32 Stride; + UInt32 BitsPerPixel; + UInt32 BytesPerPixel; + }; + + struct AccelDataStruct + { + UInt64 VinylHupTicks; + UInt64 SocTicks; + Single X; + Single Y; + Single Z; + Single temperature; + }; + + struct GyroDataStruct + { + UInt64 VinylHupTicks; + UInt64 SocTicks; + Single X; + Single Y; + Single Z; + Single temperature; + }; + + struct MagDataStruct + { + UInt64 VinylHupTicks; + UInt64 SocTicks; + Single X; + Single Y; + Single Z; + }; + + enum ResearchModeSensorConsent + { + DeniedBySystem = 0, + NotDeclaredByApp = 1, + DeniedByUser = 2, + UserPromptRequired = 3, + Allowed = 4, + }; + + runtimeclass ResearchModeSensorDevice : IResearchModeSensorDevicePerception, IResearchModeSensorDeviceConsent + { + ResearchModeSensorDevice(); + + Int32 GetSensorCount(); + void DisableEyeSelection(); + void EnableEyeSelection(); + ResearchModeSensorDescriptor[] GetSensorDescriptors(); + IResearchModeSensor GetSensor(ResearchModeSensorType sensorType); + }; + + runtimeclass ResearchModeCameraSensor : [default] IResearchModeSensor + { + // Note: the map methods return the HRESULT instead of throwing so we can detect legitimate failures + [noexcept] Int32 MapImagePointToCameraUnitPlane(Windows.Foundation.Point uv, out Windows.Foundation.Point xy); + [noexcept] Int32 MapCameraSpaceToImagePoint(Windows.Foundation.Point xy, out Windows.Foundation.Point uv); + + Windows.Foundation.Numerics.Matrix4x4 GetCameraExtrinsicsMatrix(); + 
}; + + runtimeclass ResearchModeImuSensor : [default] IResearchModeSensor + { + }; + + interface IResearchModeSensorDevicePerception + { + Guid GetRigNodeId(); + }; + + interface IResearchModeSensorDeviceConsent + { + Windows.Foundation.IAsyncOperation RequestCameraAccessAsync(); + Windows.Foundation.IAsyncOperation RequestIMUAccessAsync(); + }; + + interface IResearchModeSensorFrame + { + ResearchModeSensorResolution GetResolution(); + + // For frames with batched samples this returns the time stamp for the first sample in the frame. + ResearchModeSensorTimestamp GetTimeStamp(); + }; + + runtimeclass ResearchModeSensorVlcFrame : IResearchModeSensorFrame + { + UInt8[] GetBuffer(); + UInt32 GetGain(); + UInt64 GetExposure(); + }; + + runtimeclass ResearchModeSensorDepthFrame : IResearchModeSensorFrame + { + UInt16[] GetBuffer(); + UInt16[] GetAbDepthBuffer(); + UInt8[] GetSigmaBuffer(); + }; + + runtimeclass ResearchModeAccelFrame : IResearchModeSensorFrame + { + AccelDataStruct[] GetCalibratedAccelarationSamples(); + }; + + runtimeclass ResearchModeGyroFrame : IResearchModeSensorFrame + { + GyroDataStruct[] GetCalibratedGyroSamples(); + }; + + runtimeclass ResearchModeMagFrame : IResearchModeSensorFrame + { + MagDataStruct[] GetMagnetometerSamples(); + }; + + interface IResearchModeSensor + { + void OpenStream(); + void CloseStream(); + String GetFriendlyName(); + ResearchModeSensorType GetSensorType(); + IResearchModeSensorFrame GetNextBuffer(); + }; +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorVlcFrame.cpp b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorVlcFrame.cpp new file mode 100644 index 000000000..2ec85b72c --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorVlcFrame.cpp @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
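+// This wrapper follows the same pattern as the other frame classes in this project: it adopts the
+// native IResearchModeSensorFrame COM pointer, queries its IResearchModeSensorVLCFrame interface,
+// and copies each native buffer into a winrt::com_array so the data can cross the WinRT ABI into
+// the managed (C#) components.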
+ +#include "pch.h" +#include "ResearchModeSensorVlcFrame.h" +#include "ResearchModeSensorVlcFrame.g.cpp" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + ResearchModeSensorVlcFrame::ResearchModeSensorVlcFrame(::IResearchModeSensorFrame* pSensorFrame) + { + m_pSensorFrame.attach(pSensorFrame); + m_pVlcFrame = m_pSensorFrame.as<::IResearchModeSensorVLCFrame>(); + } + + com_array ResearchModeSensorVlcFrame::GetBuffer() + { + const BYTE* pBuffer = nullptr; + size_t bufferLength = 0; + + HRESULT hr = m_pVlcFrame->GetBuffer(&pBuffer, &bufferLength); + winrt::check_hresult(hr); + return winrt::com_array(pBuffer, pBuffer + bufferLength); + } + + uint32_t ResearchModeSensorVlcFrame::GetGain() + { + UINT32 gain = 0; + + HRESULT hr = m_pVlcFrame->GetGain(&gain); + winrt::check_hresult(hr); + return gain; + } + + uint64_t ResearchModeSensorVlcFrame::GetExposure() + { + UINT64 exposure = 0; + + HRESULT hr = m_pVlcFrame->GetExposure(&exposure); + winrt::check_hresult(hr); + return exposure; + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorResolution ResearchModeSensorVlcFrame::GetResolution() + { + ::ResearchModeSensorResolution resolution; + winrt::check_hresult(m_pSensorFrame->GetResolution(&resolution)); + return *(reinterpret_cast(&resolution)); + } + + winrt::HoloLens2ResearchMode::ResearchModeSensorTimestamp ResearchModeSensorVlcFrame::GetTimeStamp() + { + ::ResearchModeSensorTimestamp timestamp; + winrt::check_hresult(m_pSensorFrame->GetTimeStamp(×tamp)); + return *(reinterpret_cast(×tamp)); + } +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorVlcFrame.h b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorVlcFrame.h new file mode 100644 index 000000000..953386a41 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/ResearchModeSensorVlcFrame.h @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +#pragma once +#include "ResearchModeSensorVlcFrame.g.h" + +namespace winrt::HoloLens2ResearchMode::implementation +{ + struct ResearchModeSensorVlcFrame : ResearchModeSensorVlcFrameT + { + ResearchModeSensorVlcFrame(::IResearchModeSensorFrame* pSensorFrame); + + com_array GetBuffer(); + uint32_t GetGain(); + uint64_t GetExposure(); + winrt::HoloLens2ResearchMode::ResearchModeSensorResolution GetResolution(); + winrt::HoloLens2ResearchMode::ResearchModeSensorTimestamp GetTimeStamp(); + + winrt::com_ptr<::IResearchModeSensorFrame> m_pSensorFrame; + winrt::com_ptr<::IResearchModeSensorVLCFrame> m_pVlcFrame; + }; +} diff --git a/Sources/MixedReality/HoloLens2ResearchMode/packages.config b/Sources/MixedReality/HoloLens2ResearchMode/packages.config new file mode 100644 index 000000000..70bf1926d --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/packages.config @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/Sources/MixedReality/HoloLens2ResearchMode/pch.cpp b/Sources/MixedReality/HoloLens2ResearchMode/pch.cpp new file mode 100644 index 000000000..9f5175684 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/pch.cpp @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +#include "pch.h" diff --git a/Sources/MixedReality/HoloLens2ResearchMode/pch.h b/Sources/MixedReality/HoloLens2ResearchMode/pch.h new file mode 100644 index 000000000..84f78b003 --- /dev/null +++ b/Sources/MixedReality/HoloLens2ResearchMode/pch.h @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT license. + +#pragma once +#include +#include +#include +#include +#include "ResearchModeApi.h" diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Accelerometer.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Accelerometer.cs new file mode 100644 index 000000000..9a8219472 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Accelerometer.cs @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System.Numerics; + using HoloLens2ResearchMode; + using Microsoft.Psi; + + /// + /// Source component that publishes accelerometer data on a stream. + /// + public class Accelerometer : ResearchModeImu + { + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + public Accelerometer(Pipeline pipeline) + : base(pipeline, ResearchModeSensorType.ImuAccel) + { + } + + /// + protected override void ProcessSensorFrame(IResearchModeSensorFrame sensorFrame) + { + this.PostSamples( + sensorFrame, + (sensorFrame as ResearchModeAccelFrame).GetCalibratedAccelarationSamples(), + f => (f.X, f.Y, f.Z), + f => f.VinylHupTicks); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/DepthCamera.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/DepthCamera.cs new file mode 100644 index 000000000..859e8ab18 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/DepthCamera.cs @@ -0,0 +1,144 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.Diagnostics; + using HoloLens2ResearchMode; + using Microsoft.Psi; + using Microsoft.Psi.Imaging; + using Windows.Perception; + +/// +/// Depth camera source component. +/// + public class DepthCamera : ResearchModeCamera + { + private const byte InvalidMask = 0x80; + private const ushort InvalidAhatValue = 4090; + + private readonly DepthCameraConfiguration configuration; + private readonly bool isLongThrow; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// The configuration for this component. + public DepthCamera(Pipeline pipeline, DepthCameraConfiguration configuration = null) + : base( + pipeline, + (configuration ?? new DepthCameraConfiguration()).Mode, + (configuration ?? new DepthCameraConfiguration()).OutputCalibrationMap, + (configuration ?? new DepthCameraConfiguration()).OutputCalibration) + { + this.configuration = configuration ?? new DepthCameraConfiguration(); + + if (this.configuration.Mode != ResearchModeSensorType.DepthLongThrow && + this.configuration.Mode != ResearchModeSensorType.DepthAhat) + { + throw new ArgumentException($"Initializing the depth camera in {this.configuration.Mode} mode is not supported."); + } + + this.isLongThrow = this.configuration.Mode == ResearchModeSensorType.DepthLongThrow; + + this.DepthImage = pipeline.CreateEmitter>(this, nameof(this.DepthImage)); + this.InfraredImage = pipeline.CreateEmitter>(this, nameof(this.InfraredImage)); + } + + /// + /// Gets the depth image stream. + /// + public Emitter> DepthImage { get; } + + /// + /// Gets the infrared image stream. 
+ /// + public Emitter> InfraredImage { get; } + + /// + protected override void ProcessSensorFrame(IResearchModeSensorFrame sensorFrame, ResearchModeSensorResolution resolution, ulong frameTicks, DateTime originatingTime) + { + if (this.configuration.OutputCalibrationMap && + (originatingTime - this.CalibrationPointsMap.LastEnvelope.OriginatingTime) > this.configuration.OutputCalibrationMapInterval) + { + // Post the calibration map created at the start + this.CalibrationPointsMap.Post(this.GetCalibrationPointsMap(), originatingTime); + } + + if (this.configuration.OutputCalibration) + { + // Post the intrinsics computed at the start + this.CameraIntrinsics.Post(this.GetCameraIntrinsics(), originatingTime); + } + + if (this.configuration.OutputPose) + { + var timestamp = PerceptionTimestampHelper.FromSystemRelativeTargetTime(TimeSpan.FromTicks((long)frameTicks)); + var rigNodeLocation = this.RigNodeLocator.TryLocateAtTimestamp(timestamp, MixedReality.WorldSpatialCoordinateSystem); + + // The rig node may not always be locatable, so we need a null check + if (rigNodeLocation != null) + { + // Compute the camera pose from the rig node location + var cameraWorldPose = this.ToCameraPose(rigNodeLocation); + this.Pose.Post(cameraWorldPose, originatingTime); + } + } + + var depthFrame = sensorFrame as ResearchModeSensorDepthFrame; + int depthImageWidth = (int)resolution.Width; + int depthImageHeight = (int)resolution.Height; + + // Process and post the depth image if requested + if (this.configuration.OutputDepth) + { + byte[] sigmaBuffer = null; + var depthBuffer = depthFrame.GetBuffer(); + + if (this.isLongThrow) + { + sigmaBuffer = depthFrame.GetSigmaBuffer(); // Long-throw only + Debug.Assert(depthBuffer.Length == sigmaBuffer.Length, "Depth and sigma buffers should be of equal size!"); + } + + using var depthImage = DepthImagePool.GetOrCreate(depthImageWidth, depthImageHeight); + Debug.Assert(depthImage.Resource.Size == depthBuffer.Length * sizeof(ushort), "DepthImage size does not match raw depth buffer size!"); + + unsafe + { + ushort* depthData = (ushort*)depthImage.Resource.ImageData.ToPointer(); + for (int i = 0; i < depthBuffer.Length; ++i) + { + bool invalid = this.isLongThrow ? + ((sigmaBuffer[i] & InvalidMask) > 0) : + (depthBuffer[i] >= InvalidAhatValue); + + *depthData++ = invalid ? (ushort)0 : depthBuffer[i]; + } + } + + this.DepthImage.Post(depthImage, originatingTime); + } + + // Process and post the infrared image if requested + if (this.configuration.OutputInfrared) + { + var infraredBuffer = depthFrame.GetAbDepthBuffer(); + using var infraredImage = ImagePool.GetOrCreate(depthImageWidth, depthImageHeight, PixelFormat.Gray_16bpp); + Debug.Assert(infraredImage.Resource.Size == infraredBuffer.Length * sizeof(ushort), "InfraredImage size does not match raw infrared buffer size!"); + + unsafe + { + fixed (ushort* p = infraredBuffer) + { + infraredImage.Resource.CopyFrom((IntPtr)p); + } + } + + this.InfraredImage.Post(infraredImage, originatingTime); + } + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/DepthCameraConfiguration.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/DepthCameraConfiguration.cs new file mode 100644 index 000000000..7c7431fcc --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/DepthCameraConfiguration.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
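+// Illustrative usage sketch (not part of the component; the pipeline and store variables are assumptions):
+//
+//   var config = new DepthCameraConfiguration { Mode = ResearchModeSensorType.DepthLongThrow, OutputInfrared = false };
+//   var depthCamera = new DepthCamera(pipeline, config);
+//   depthCamera.DepthImage.Write("DepthCamera.DepthImage", store);
+//
+// With the defaults below, the component emits depth, infrared, pose, calibration and the calibration
+// map; disable the streams that are not needed to reduce overhead.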
+ +namespace Microsoft.Psi.MixedReality +{ + using System; + using HoloLens2ResearchMode; + + /// + /// Configuration for the component. + /// + public class DepthCameraConfiguration + { + /// + /// Gets or sets a value indicating whether the calibration settings are emitted. + /// + public bool OutputCalibration { get; set; } = true; + + /// + /// Gets or sets a value indicating whether the original map of points for calibration are emitted. + /// + public bool OutputCalibrationMap { get; set; } = true; + + /// + /// Gets or sets the minimum interval between posting calibration map messages. + /// + public TimeSpan OutputCalibrationMapInterval { get; set; } = TimeSpan.FromSeconds(20); + + /// + /// Gets or sets a value indicating whether the camera pose stream is emitted. + /// + public bool OutputPose { get; set; } = true; + + /// + /// Gets or sets a value indicating whether the depth stream is emitted. + /// + public bool OutputDepth { get; set; } = true; + + /// + /// Gets or sets a value indicating whether the infrared stream is emitted. + /// + public bool OutputInfrared { get; set; } = true; + + /// + /// Gets or sets the sensor mode. + /// + /// Valid values are: DepthLongThrow or DepthAhat. + public ResearchModeSensorType Mode { get; set; } = ResearchModeSensorType.DepthLongThrow; + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Gyroscope.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Gyroscope.cs new file mode 100644 index 000000000..3dc25c997 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Gyroscope.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using HoloLens2ResearchMode; + using Microsoft.Psi; + + /// + /// Source component that publishes gyroscope data on a stream. + /// + public class Gyroscope : ResearchModeImu + { + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + public Gyroscope(Pipeline pipeline) + : base(pipeline, ResearchModeSensorType.ImuGyro) + { + } + + /// + protected override void ProcessSensorFrame(IResearchModeSensorFrame sensorFrame) + { + this.PostSamples( + sensorFrame, + (sensorFrame as ResearchModeGyroFrame).GetCalibratedGyroSamples(), + f => (f.X, f.Y, f.Z), + f => f.VinylHupTicks); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ImageToGzipStreamEncoder.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ImageToGzipStreamEncoder.cs new file mode 100644 index 000000000..c18546c8c --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ImageToGzipStreamEncoder.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Imaging +{ + using System.IO; + using System.IO.Compression; + + /// + /// Implements an image encoder for GZip format. 
+ /// + public class ImageToGZipStreamEncoder : IImageToStreamEncoder + { + /// + public void EncodeToStream(Image image, Stream stream) + { + unsafe + { + var size = image.Stride * image.Height; + var imageData = new UnmanagedMemoryStream((byte*)image.ImageData.ToPointer(), size); + using var compressor = new GZipStream(stream, CompressionMode.Compress, true); + imageData.CopyTo(compressor); + } + } + } +} \ No newline at end of file diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ImageToJpegStreamEncoder.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ImageToJpegStreamEncoder.cs new file mode 100644 index 000000000..7963b27ab --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ImageToJpegStreamEncoder.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.IO; + using System.Threading.Tasks; + using Microsoft.Psi.Imaging; + using Windows.Graphics.Imaging; + + /// + /// Implements a JPEG image encoder. + /// + public class ImageToJpegStreamEncoder : IImageToStreamEncoder + { + private readonly BitmapPropertySet propertySet; + + /// + /// Initializes a new instance of the class. + /// + /// Optional image quality (0.0 - 1.0, default 1.0). + public ImageToJpegStreamEncoder(double imageQuality = 1.0) + { + this.propertySet = new BitmapPropertySet(); + this.propertySet.Add("ImageQuality", new BitmapTypedValue(imageQuality, Windows.Foundation.PropertyType.Single)); + } + + /// + public void EncodeToStream(Image image, Stream stream) + { + this.Encode(image, stream).Wait(); + } + + private async Task Encode(Image image, Stream stream) + { + var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream.AsRandomAccessStream(), this.propertySet); + encoder.SetPixelData( + BitmapPixelFormat.Bgra8, + BitmapAlphaMode.Ignore, + (uint)image.Width, + (uint)image.Height, + 96, + 96, + image.ReadBytes(image.Size)); + await encoder.FlushAsync(); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Magnetometer.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Magnetometer.cs new file mode 100644 index 000000000..6bc908c9c --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Magnetometer.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using HoloLens2ResearchMode; + using Microsoft.Psi; + + /// + /// Source component that publishes magnetometer data on a stream. + /// + public class Magnetometer : ResearchModeImu + { + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. 
+ public Magnetometer(Pipeline pipeline) + : base(pipeline, ResearchModeSensorType.ImuMag) + { + } + + /// + protected override void ProcessSensorFrame(IResearchModeSensorFrame sensorFrame) + { + this.PostSamples( + sensorFrame, + (sensorFrame as ResearchModeMagFrame).GetMagnetometerSamples(), + f => (f.X, f.Y, f.Z), + f => f.VinylHupTicks); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Microsoft.Psi.MixedReality.UniversalWindows.csproj b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Microsoft.Psi.MixedReality.UniversalWindows.csproj new file mode 100644 index 000000000..b6118e2dc --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Microsoft.Psi.MixedReality.UniversalWindows.csproj @@ -0,0 +1,158 @@ + + + + + Debug + AnyCPU + {ECD9E150-8104-4DA3-B807-A6A4392A67C6} + Library + Properties + Microsoft.Psi.MixedReality + Microsoft.Psi.MixedReality.UniversalWindows + en-US + UAP + 10.0.19041.0 + 10.0.17763.0 + 14 + 512 + {A5A43C5B-DE2A-4C0C-9213-0A381AF9435A};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC} + true + Provides UWP-specific data structures and components for processing inputs and rendering in mixed-reality using StereoKit. + $(TargetsForTfmSpecificBuildOutput);AdditionalPackageFiles + + + ARM + true + bin\ARM\Debug\ + DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP + ;2008 + full + false + prompt + true + bin\ARM\Debug\Microsoft.Psi.MixedReality.UniversalWindows.XML + ..\..\..\Build\Microsoft.Psi.ruleset + false + true + + + ARM + bin\ARM\Release\ + TRACE;NETFX_CORE;WINDOWS_UWP + true + ;2008 + pdbonly + false + prompt + true + bin\ARM\Release\Microsoft.Psi.MixedReality.UniversalWindows.XML + ..\..\..\Build\Microsoft.Psi.ruleset + false + true + + + PackageReference + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 0.6.0 + + + 0.5.2069 + + + 6.2.12 + + + 6.0.0 + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + 0.3.5 + + + 1.1.118 + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + {84ce1fe5-8141-4c2a-ac30-21bdc87f5d0a} + Microsoft.Psi.Calibration + + + {9bf2e5ef-186a-4179-b753-ae11ee90e026} + Microsoft.Psi.Imaging + + + {04147400-0ab0-4f07-9975-d4b7e58150db} + Microsoft.Psi + + + {f6e0c57a-a54e-4b05-a783-3a1e0696be87} + Microsoft.Psi.Spatial.Euclidean + + + {f50194c0-9561-40c7-b9cb-b977e3b3d76d} + HoloLens2ResearchMode + all + + + {af0d1e9c-ae0e-4ddf-9a1a-ac512cef2bac} + Microsoft.Psi.MixedReality + + + + + + + 14.0 + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedReality.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedReality.cs new file mode 100644 index 000000000..7b4c3e83b --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedReality.cs @@ -0,0 +1,124 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.Numerics; + using System.Threading.Tasks; + using MathNet.Spatial.Euclidean; + using StereoKit; + using Windows.Perception.Spatial; + + /// + /// Represents mixed reality utility functions. + /// + public static class MixedReality + { + private const string WorldSpatialAnchorId = "_world"; + + /// + /// Gets the world coordinate system. 
+ /// + public static SpatialCoordinateSystem WorldSpatialCoordinateSystem { get; private set; } + + /// + /// Gets the spatial anchor helper. + /// + public static SpatialAnchorHelper SpatialAnchorHelper { get; private set; } + + /// + /// Initializes static members of the class. Attempts to initialize + /// the world coordinate system from a persisted spatial anchor. If one is not found, a stationary + /// frame of reference is created at the current location and its position is used as the world + /// coordinate system. + /// + /// A representing the asynchronous operation. + /// + /// This method should be called after SK.Initialize. + /// + public static async Task InitializeAsync() + { + if (!SK.IsInitialized) + { + throw new InvalidOperationException("StereoKit is not initialized. Call SK.Initialize before calling MixedReality.InitializeAsync."); + } + + // Create the spatial anchor helper + SpatialAnchorHelper = new SpatialAnchorHelper(await SpatialAnchorManager.RequestStoreAsync()); + + InitializeWorldCoordinateSystem(); + } + + /// + /// Initializes the world coordinate system for the application using a pre-defined spatial anchor, + /// or creates it at a stationary frame of reference if it does not exist. Once initialized, the + /// world coordinate system will be consistent across application sessions, unless the associated + /// spatial anchor is modified or deleted. + /// + private static void InitializeWorldCoordinateSystem() + { + // Try to get a previously saved world spatial anchor + var worldSpatialAnchor = SpatialAnchorHelper.TryGetSpatialAnchor(WorldSpatialAnchorId); + + if (worldSpatialAnchor != null) + { + // Set the world spatial coordinate system using the spatial anchor + WorldSpatialCoordinateSystem = worldSpatialAnchor.CoordinateSystem; + } + else + { + var locator = SpatialLocator.GetDefault(); + + if (locator != null) + { + // This creates a stationary frame of reference which we will use as our world origin + var world = locator.CreateStationaryFrameOfReferenceAtCurrentLocation(); + + // Save the world spatial coordinate system + WorldSpatialCoordinateSystem = world.CoordinateSystem; + + // Create a spatial anchor to represent the world origin and persist it to the spatial + // anchor store to ensure that the origin remains coherent between sessions. + worldSpatialAnchor = SpatialAnchorHelper.TryCreateSpatialAnchor(WorldSpatialAnchorId, WorldSpatialCoordinateSystem); + + if (worldSpatialAnchor == null) + { + System.Diagnostics.Trace.WriteLine($"WARNING: Could not create the persistent world spatial anchor."); + } + } + else + { + System.Diagnostics.Trace.WriteLine($"WARNING: Could not get spatial locator (expected in StereoKit on desktop)."); + } + } + + if (worldSpatialAnchor != null) + { + // At startup, we need to capture the pose of StereoKit with respect to the world anchor, and vice versa. + // These transforms will allow us to convert world coordinates to/from StereoKit coordinates where needed: + // on input from StereoKit -> \psi, and on output (rendering) \psi -> StereoKit + + // The pose of world anchor is essentially the inverse of the startup pose of StereoKit with respect to the world. + Matrix4x4 worldStereoKitMatrix = World.FromPerceptionAnchor(worldSpatialAnchor).ToMatrix(); + StereoKitTransforms.StereoKitStartingPoseInverse = new CoordinateSystem(worldStereoKitMatrix.ToMathNetMatrix().ChangeBasisHoloLensToPsi()); + + // Inverting then gives us the starting pose of StereoKit in the "world" (relative to the world anchor). 
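+ // In transform terms, StereoKitStartingPose maps points expressed in StereoKit's startup frame into
+ // \psi world coordinates, while StereoKitStartingPoseInverse maps world coordinates back into
+ // StereoKit's frame (used on the rendering path).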
+ StereoKitTransforms.StereoKitStartingPose = StereoKitTransforms.StereoKitStartingPoseInverse.Invert(); + + System.Diagnostics.Trace.WriteLine($"StereoKit origin: {StereoKitTransforms.StereoKitStartingPose.Origin.X},{StereoKitTransforms.StereoKitStartingPose.Origin.Y},{StereoKitTransforms.StereoKitStartingPose.Origin.Z}"); + + // TODO: It would be nice if we could actually just shift the origin coordinate system in StereoKit + // to the pose currently defined in StereoKitTransforms.WorldPose. + // There's currently an open issue for this: https://github.com/maluoi/StereoKit/issues/189 + + // Simply setting the renderer camera root does not work, as its transform appears to be applied in the wrong order. + // E.g., if the starting StereoKit pose is at a yaw rotation of 180 degrees, we would want to apply that transform + // first, then apply the transform of the headset pose (perhaps pitching up). Instead, it appears that the headset + // pose is applied first (e.g., pitching up), and *then* the Renderer.CameraRoot transform is applied (yaw of 180 degrees) + // which in this example manifests as the pitch going down, opposite of what we desired. + ////Renderer.CameraRoot = stereoKitTransform.Inverse; + } + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedRealityCapturePerspective.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedRealityCapturePerspective.cs new file mode 100644 index 000000000..facd2b364 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedRealityCapturePerspective.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + /// + /// Enumeration which indicates the perspective from which holograms are rendered in the mixed-reality image. + /// See https://docs.microsoft.com/en-us/windows/mixed-reality/develop/platform-capabilities-and-apis/mixed-reality-capture-for-developers for details. + /// + public enum MixedRealityCapturePerspective + { + /// + /// Screen perspective. + /// + Display = 0, + + /// + /// Photo video camera perspective. + /// + PhotoVideoCamera = 1, + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedRealityCaptureVideoEffect.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedRealityCaptureVideoEffect.cs new file mode 100644 index 000000000..8000280d7 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/MixedRealityCaptureVideoEffect.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using Windows.Foundation; + using Windows.Foundation.Collections; + using Windows.Media.Capture; + using Windows.Media.Effects; + + /// + /// Video effect definition for mixed-reality capture via the PV camera. See + /// https://docs.microsoft.com/en-us/windows/mixed-reality/develop/platform-capabilities-and-apis/mixed-reality-capture-for-developers + /// for more information on enabling mixed-reality capture. + /// + public class MixedRealityCaptureVideoEffect : IVideoEffectDefinition + { + private readonly PropertySet properties; + + /// + /// Initializes a new instance of the class. + /// + /// The capture stream to which this effect is to be applied. + /// The opacity of the holograms in range from 0.0 (fully transparent) to 1.0 (fully opaque). 
+ /// + /// Value used to indicate which holographic camera view configuration should be captured: + /// 0 (Display) means that the app won't be asked to render from the photo/video camera, + /// 1 (PhotoVideoCamera) will ask the app to render from the photo/video camera (if the app supports it). + /// Only supported on HoloLens 2. + /// + public MixedRealityCaptureVideoEffect( + MediaStreamType streamType = MediaStreamType.VideoRecord, + float globalOpacityCoefficient = 0.9f, + MixedRealityCapturePerspective preferredHologramPerspective = MixedRealityCapturePerspective.PhotoVideoCamera) + { + this.properties = new () + { + { "StreamType", streamType }, + { "HologramCompositionEnabled", true }, + { "RecordingIndicatorEnabled", false }, + { "VideoStabilizationEnabled", false }, + { "VideoStabilizationBufferLength", 0 }, + { "GlobalOpacityCoefficient", globalOpacityCoefficient }, + { "BlankOnProtectedContent", false }, + { "ShowHiddenMesh", false }, + { "OutputSize", new Size(0, 0) }, + { "PreferredHologramPerspective", (uint)preferredHologramPerspective }, // cast is necessary for this to work + }; + } + + /// + /// Gets the class ID of this video effect definition. + /// + public string ActivatableClassId => "Windows.Media.MixedRealityCapture.MixedRealityCaptureVideoEffect"; + + /// + /// Gets the properties of this video effect definition. + /// + public IPropertySet Properties => this.properties; + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Operators.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Operators.cs new file mode 100644 index 000000000..dbbcfaf85 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Operators.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System.Numerics; + using MathNet.Spatial.Euclidean; + using Windows.Perception.Spatial; + using Quaternion = System.Numerics.Quaternion; + + /// + /// Implements operators. + /// + public static partial class Operators + { + /// + /// Converts a in HoloLens basis to a in \psi basis. + /// + /// The . + /// The . + public static CoordinateSystem TryConvertSpatialCoordinateSystemToPsiCoordinateSystem(this SpatialCoordinateSystem spatialCoordinateSystem) + { + var worldPose = spatialCoordinateSystem.TryGetTransformTo(MixedReality.WorldSpatialCoordinateSystem); + return worldPose.HasValue ? new CoordinateSystem(worldPose.Value.ToMathNetMatrix().ChangeBasisHoloLensToPsi()) : null; + } + + /// + /// Converts a in \psi basis to a in HoloLens basis. + /// + /// The in \psi basis. + /// The . 
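+ // Note: a SpatialCoordinateSystem cannot be constructed directly, so the conversion below creates a
+ // SpatialAnchor at the desired pose relative to the world coordinate system and returns that anchor's
+ // coordinate system; the result may be null if the anchor could not be created.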
+ public static SpatialCoordinateSystem TryConvertPsiCoordinateSystemToSpatialCoordinateSystem(this CoordinateSystem coordinateSystem) + { + var holoLensMatrix = coordinateSystem.ChangeBasisPsiToHoloLens().ToSystemNumericsMatrix(); + var translation = holoLensMatrix.Translation; + holoLensMatrix.Translation = Vector3.Zero; + var rotation = Quaternion.CreateFromRotationMatrix(holoLensMatrix); + var spatialAnchor = SpatialAnchor.TryCreateRelativeTo(MixedReality.WorldSpatialCoordinateSystem, translation, rotation); + return spatialAnchor?.CoordinateSystem; + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/PhotoVideoCamera.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/PhotoVideoCamera.cs new file mode 100644 index 000000000..ed65f8d27 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/PhotoVideoCamera.cs @@ -0,0 +1,541 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.Collections.Generic; + using System.Linq; + using System.Text; + using System.Threading.Tasks; + using MathNet.Numerics.LinearAlgebra; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi; + using Microsoft.Psi.Calibration; + using Microsoft.Psi.Components; + using Microsoft.Psi.Imaging; + using Windows.Foundation; + using Windows.Graphics.Imaging; + using Windows.Media.Capture; + using Windows.Media.Capture.Frames; + + /// + /// Photo/video (PV) camera source component. + /// + public class PhotoVideoCamera : ISourceComponent, IDisposable + { + private readonly PhotoVideoCameraConfiguration configuration; + private readonly Pipeline pipeline; + private readonly Task initMediaCaptureTask; + + private MediaCapture mediaCapture; + private MediaFrameReader videoFrameReader; + private MediaFrameReader previewFrameReader; + private TypedEventHandler videoFrameHandler; + private TypedEventHandler previewFrameHandler; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// The configuration for this component. + public PhotoVideoCamera(Pipeline pipeline, PhotoVideoCameraConfiguration configuration = null) + { + this.pipeline = pipeline; + this.configuration = configuration ?? new PhotoVideoCameraConfiguration(); + this.VideoImage = pipeline.CreateEmitter>(this, nameof(this.VideoImage)); + this.PreviewImage = pipeline.CreateEmitter>(this, nameof(this.PreviewImage)); + this.VideoIntrinsics = pipeline.CreateEmitter(this, nameof(this.VideoIntrinsics)); + this.VideoPose = pipeline.CreateEmitter(this, nameof(this.VideoPose)); + this.PreviewIntrinsics = pipeline.CreateEmitter(this, nameof(this.PreviewIntrinsics)); + this.PreviewPose = pipeline.CreateEmitter(this, nameof(this.PreviewPose)); + + // Call this here (rather than in the Start() method, which is executed on the thread pool) to + // ensure that MediaCapture.InitializeAsync() is called from an STA thread (this constructor must + // itself be called from an STA thread in order for this to be true). Calls from an MTA thread may + // result in undefined behavior, per the following documentation: + // https://docs.microsoft.com/en-us/uwp/api/windows.media.capture.mediacapture.initializeasync + this.initMediaCaptureTask = this.InitializeMediaCaptureAsync(); + } + + /// + /// Gets the video image stream. + /// + public Emitter> VideoImage { get; } + + /// + /// Gets the video camera pose stream. 
+ /// + public Emitter VideoPose { get; } + + /// + /// Gets the video camera intrinsics stream. + /// + public Emitter VideoIntrinsics { get; } + + /// + /// Gets the preview image stream. + /// + public Emitter> PreviewImage { get; } + + /// + /// Gets the preview camera pose stream. + /// + public Emitter PreviewPose { get; } + + /// + /// Gets the preview camera intrinsics stream. + /// + public Emitter PreviewIntrinsics { get; } + + /// + public void Dispose() + { + if (this.mediaCapture != null) + { + this.mediaCapture.Dispose(); + this.mediaCapture = null; + } + } + + /// + public async void Start(Action notifyCompletionTime) + { + // notify that this is an infinite source component + notifyCompletionTime(DateTime.MaxValue); + + // Ensure that media capture initialization has finished + await this.initMediaCaptureTask; + + // Start the media frame reader for the Video stream, if configured + if (this.videoFrameReader != null) + { + var status = await this.videoFrameReader.StartAsync(); + if (status != MediaFrameReaderStartStatus.Success) + { + throw new InvalidOperationException($"Video stream media frame reader failed to start: {status}"); + } + + if (this.configuration.VideoStreamSettings.MixedRealityCapture != null) + { + // Add the mixed-reality effect to the VideoRecord stream so we can capture the video with holograms. + // Note that this is done *after* capture has started, as outlined in the documentation here: + // https://docs.microsoft.com/en-us/windows/mixed-reality/develop/platform-capabilities-and-apis/mixed-reality-capture-for-developers#mrc-access-for-developers + await this.mediaCapture.AddVideoEffectAsync(this.configuration.VideoStreamSettings.MixedRealityCapture, MediaStreamType.VideoRecord); + } + + // Create the frame handler - this handles the FrameArrived event which is raised + // whenever a new Video frame is available. The frame image, pose and intrinsics + // (if configured) are then posted on the respective output emitters. + this.videoFrameHandler = this.CreateMediaFrameHandler( + this.VideoImage, + this.configuration.VideoStreamSettings.OutputIntrinsics ? this.VideoIntrinsics : null, + this.configuration.VideoStreamSettings.OutputPose ? this.VideoPose : null); + + this.videoFrameReader.FrameArrived += this.videoFrameHandler; + } + + // Start the media frame reader for the Preview stream, if configured + if (this.previewFrameReader != null) + { + var status = await this.previewFrameReader.StartAsync(); + if (status != MediaFrameReaderStartStatus.Success) + { + throw new InvalidOperationException($"Preview stream media frame reader failed to start: {status}"); + } + + if (this.configuration.PreviewStreamSettings.MixedRealityCapture != null) + { + // Add the mixed-reality effect to the VideoPreview stream so we can capture the video with holograms. + // Note that this is done *after* capture has started, as outlined in the documentation here: + // https://docs.microsoft.com/en-us/windows/mixed-reality/develop/platform-capabilities-and-apis/mixed-reality-capture-for-developers#mrc-access-for-developers + await this.mediaCapture.AddVideoEffectAsync(this.configuration.PreviewStreamSettings.MixedRealityCapture, MediaStreamType.VideoPreview); + } + + // Create the frame handler - this handles the FrameArrived event which is raised + // whenever a new Preview frame is available. The frame image, pose and intrinsics + // (if configured) are then posted on the respective output emitters. 
+ this.previewFrameHandler = this.CreateMediaFrameHandler( + this.PreviewImage, + this.configuration.PreviewStreamSettings.OutputIntrinsics ? this.PreviewIntrinsics : null, + this.configuration.PreviewStreamSettings.OutputPose ? this.PreviewPose : null); + + this.previewFrameReader.FrameArrived += this.previewFrameHandler; + } + } + + /// + public async void Stop(DateTime finalOriginatingTime, Action notifyCompleted) + { + if (this.videoFrameReader != null) + { + this.videoFrameReader.FrameArrived -= this.videoFrameHandler; + + await this.videoFrameReader.StopAsync(); + this.videoFrameReader.Dispose(); + this.videoFrameReader = null; + } + + if (this.previewFrameReader != null) + { + this.previewFrameReader.FrameArrived -= this.previewFrameHandler; + + await this.previewFrameReader.StopAsync(); + this.previewFrameReader.Dispose(); + this.previewFrameReader = null; + } + + notifyCompleted(); + } + + /// + /// Initializes the MediaCapture object and creates the MediaFrameReaders for the configured capture streams. + /// + /// A task representing the asynchronous operation. + private async Task InitializeMediaCaptureAsync() + { + // Try to find the media capture settings for the requested capture configuration + var settings = await this.CreateMediaCaptureSettingsAsync(); + + // If we couldn't create the settings, retrieve and print all the supported capture modes + if (settings == null) + { + var supportedModes = await this.GetSupportedMediaCaptureModesAsync(); + + // Pretty-print the list of supported modes + var msg = new StringBuilder(); + msg.AppendLine("No media frame source group was found that matched the requested capture parameters. Please select from the following profiles and resolutions:"); + foreach (var profileModes in supportedModes.GroupBy(x => x.Profile.Id)) + { + msg.AppendLine($"Profile: {profileModes.Key}"); + foreach (var mode in profileModes + .OrderByDescending(x => x.Type) + .ThenByDescending(x => x.Description.Width) + .ThenBy(x => x.Description.FrameRate)) + { + msg.AppendLine($" {mode.Type}: {mode.Description.Width}x{mode.Description.Height} @ {mode.Description.FrameRate}fps"); + } + + msg.AppendLine(); + } + + msg.AppendLine("If capturing both the Video and Preview streams, the requested resolutions must both be supported by the same profile."); + + // Display the list of supported modes in the exception message + throw new InvalidOperationException(msg.ToString()); + } + + var selectedSourceGroup = settings.SourceGroup; + + // Initialize the MediaCapture object + this.mediaCapture = new MediaCapture(); + await this.mediaCapture.InitializeAsync(settings); + + // Create the MediaFrameReader for the Video stream + if (this.configuration.VideoStreamSettings != null) + { + this.videoFrameReader = await this.CreateMediaFrameReaderAsync( + selectedSourceGroup, + this.configuration.VideoStreamSettings.ImageWidth, + this.configuration.VideoStreamSettings.ImageHeight, + this.configuration.VideoStreamSettings.FrameRate, + MediaStreamType.VideoRecord); + + if (this.videoFrameReader == null) + { + throw new InvalidOperationException("Could not create a frame reader for the requested video settings."); + } + } + + // Create the MediaFrameReader for the Preview stream + if (this.configuration.PreviewStreamSettings != null) + { + this.previewFrameReader = await this.CreateMediaFrameReaderAsync( + selectedSourceGroup, + this.configuration.PreviewStreamSettings.ImageWidth, + this.configuration.PreviewStreamSettings.ImageHeight, + this.configuration.PreviewStreamSettings.FrameRate, 
+ MediaStreamType.VideoPreview); + + if (this.previewFrameReader == null) + { + throw new InvalidOperationException("Could not create a frame reader for the requested preview settings."); + } + } + } + + /// + /// Gets all the supported media capture modes supported by the current device. + /// + /// A list of supported media capture modes (profile and description). + private async Task> GetSupportedMediaCaptureModesAsync() + { + var supportedModes = new List<(MediaStreamType Type, MediaCaptureVideoProfile Profile, MediaCaptureVideoProfileMediaDescription Description)>(); + var mediaFrameSourceGroups = await MediaFrameSourceGroup.FindAllAsync(); + foreach (var mediaFrameSourceGroup in mediaFrameSourceGroups) + { + var knownProfiles = MediaCapture.FindAllVideoProfiles(mediaFrameSourceGroup.Id); + + // Search for Video and Preview stream types + foreach (var knownProfile in knownProfiles) + { + foreach (var knownDesc in knownProfile.SupportedRecordMediaDescription) + { + supportedModes.Add((MediaStreamType.VideoRecord, knownProfile, knownDesc)); + } + + foreach (var knownDesc in knownProfile.SupportedPreviewMediaDescription) + { + supportedModes.Add((MediaStreamType.VideoPreview, knownProfile, knownDesc)); + } + } + } + + return supportedModes; + } + + /// + /// Creates the initialization settings for the MediaCapture object that will support + /// all the requested capture settings specified in the configuration object. This method + /// will iterate through all the device's video capture profiles to find one that supports + /// the requested capture frame dimensions and frame rate. If both Video and Preview streams + /// are selected (e.g. for simultaneous mixed reality capture), then the selected profile must + /// support the capture modes for both streams. + /// + /// + /// A MediaCaptureInitializationSettings object for the first profile that satisfies all the + /// requested capture settings in the configuration object, or null if no such profile was found. + /// + private async Task CreateMediaCaptureSettingsAsync() + { + MediaFrameSourceGroup selectedSourceGroup = null; + MediaCaptureVideoProfile profile = null; + MediaCaptureVideoProfileMediaDescription videoDesc = null; + MediaCaptureVideoProfileMediaDescription previewDesc = null; + + var mediaFrameSourceGroups = await MediaFrameSourceGroup.FindAllAsync(); + + // Search all source groups + foreach (var mediaFrameSourceGroup in mediaFrameSourceGroups) + { + // Search for a profile that supports the requested capture modes + var knownProfiles = MediaCapture.FindAllVideoProfiles(mediaFrameSourceGroup.Id); + foreach (var knownProfile in knownProfiles) + { + // If a video stream capture mode was specified + if (this.configuration.VideoStreamSettings != null) + { + // Clear any partial matches and continue searching + profile = null; + videoDesc = null; + selectedSourceGroup = null; + + // Search the supported video (recording) modes for the requested resolution and frame rate + foreach (var knownDesc in knownProfile.SupportedRecordMediaDescription) + { + if (knownDesc.Width == this.configuration.VideoStreamSettings.ImageWidth && + knownDesc.Height == this.configuration.VideoStreamSettings.ImageHeight && + knownDesc.FrameRate == this.configuration.VideoStreamSettings.FrameRate) + { + // Found a match for video. Need to also match the requested preview mode (if any) + // within the same profile and source group, otherwise we have to keep searching. 
+ profile = knownProfile; + videoDesc = knownDesc; + selectedSourceGroup = mediaFrameSourceGroup; + break; + } + } + + if (profile == null) + { + // This profile does not support the requested video stream capture parameters - try the next profile + continue; + } + } + + // If a preview stream capture mode was specified + if (this.configuration.PreviewStreamSettings != null) + { + // Clear any partial matches and continue searching + profile = null; + previewDesc = null; + selectedSourceGroup = null; + + // Search the supported preview modes for the requested resolution and frame rate + foreach (var knownDesc in knownProfile.SupportedPreviewMediaDescription) + { + if (knownDesc.Width == this.configuration.PreviewStreamSettings.ImageWidth && + knownDesc.Height == this.configuration.PreviewStreamSettings.ImageHeight && + knownDesc.FrameRate == this.configuration.PreviewStreamSettings.FrameRate) + { + // Found a match + profile = knownProfile; + previewDesc = knownDesc; + selectedSourceGroup = mediaFrameSourceGroup; + break; + } + } + + if (profile == null) + { + // This profile does not support the requested preview mode - try the next profile + continue; + } + } + + if (profile != null) + { + // Found a valid profile that supports the requested capture settings + return new MediaCaptureInitializationSettings + { + VideoProfile = profile, + RecordMediaDescription = videoDesc, + PreviewMediaDescription = previewDesc, + VideoDeviceId = selectedSourceGroup.Id, + StreamingCaptureMode = StreamingCaptureMode.Video, + MemoryPreference = MediaCaptureMemoryPreference.Cpu, + SharingMode = MediaCaptureSharingMode.ExclusiveControl, + SourceGroup = selectedSourceGroup, + }; + } + } + } + + // No matching settings were found + return null; + } + + /// + /// Creates a MediaFrameReader from the media source group for the given target capture settings. + /// + /// The media source group. + /// The requested capture frame width. + /// The requested capture frame height. + /// The requested capture frame rate. + /// The requested capture stream type. + /// A task representing the asynchronous operation. + private async Task CreateMediaFrameReaderAsync(MediaFrameSourceGroup sourceGroup, int targetWidth, int targetHeight, int targetFrameRate, MediaStreamType targetStreamType) + { + // Search all color frame sources of the requested stream type (Video or Preview) + foreach (var sourceInfo in sourceGroup.SourceInfos + .Where(si => si.SourceKind == MediaFrameSourceKind.Color && si.MediaStreamType == targetStreamType)) + { + var frameSource = this.mediaCapture.FrameSources[sourceInfo.Id]; + + // Check if the frame source supports the requested format + foreach (var format in frameSource.SupportedFormats) + { + int frameRate = (int)Math.Round((double)format.FrameRate.Numerator / format.FrameRate.Denominator); + if (format.VideoFormat.Width == targetWidth && + format.VideoFormat.Height == targetHeight && + frameRate == targetFrameRate) + { + // Found a frame source for the requested format - create the frame reader + await frameSource.SetFormatAsync(format); + return await this.mediaCapture.CreateFrameReaderAsync(frameSource); + } + } + } + + // No frame source was found for the requested format + return null; + } + + /// + /// Creates an event handler that handles the FrameArrived event of the MediaFrameReader. + /// + /// The stream on which to post the output image. + /// The stream on which to post the camera intrinsics. + /// The stream on which to post the camera pose. + /// The event handler. 
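+ // The handler below converts each frame's QPC-relative timestamp to pipeline time, caches the camera
+ // intrinsics after the first frame, converts the NV12 bitmap to BGRA, and copies the pixels into a
+ // pooled shared image before posting.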
+ private TypedEventHandler CreateMediaFrameHandler( + Emitter> imageStream, + Emitter intrinsicsStream = null, + Emitter poseStream = null) + { + // Cache the intrinsics + ICameraIntrinsics cameraIntrinsics = null; + + return (sender, args) => + { + using var frame = sender.TryAcquireLatestFrame(); + if (frame != null) + { + // Convert frame QPC time to pipeline time + var frameTimestamp = frame.SystemRelativeTime.Value.Ticks; + var originatingTime = this.pipeline.GetCurrentTimeFromElapsedTicks(frameTimestamp); + + // Post the camera intrinsics if requested + if (intrinsicsStream != null) + { + cameraIntrinsics ??= this.GetCameraIntrinsics(frame); + intrinsicsStream.Post(cameraIntrinsics, originatingTime); + } + + // Post the camera pose if requested + if (poseStream != null) + { + // Convert the frame coordinate system to world pose in psi basis + var worldPose = frame.CoordinateSystem?.TryConvertSpatialCoordinateSystemToPsiCoordinateSystem(); + poseStream.Post(worldPose, originatingTime); + } + + // Accessing the VideoMediaFrame.SoftwareBitmap property creates a strong reference which needs to be Disposed, per the remarks here: + // https://docs.microsoft.com/en-us/uwp/api/windows.media.capture.frames.mediaframereference?view=winrt-19041#remarks + using var frameBitmap = frame.VideoMediaFrame.SoftwareBitmap; + + // Convert from NV12 to BGRA32 + using var softwareBitmap = SoftwareBitmap.Convert(frameBitmap, BitmapPixelFormat.Bgra8); + + // Copy bitmap data into a Shared + unsafe + { + using var sharedImage = ImagePool.GetOrCreate(softwareBitmap.PixelWidth, softwareBitmap.PixelHeight, PixelFormat.BGRA_32bpp); + using var input = softwareBitmap.LockBuffer(BitmapBufferAccessMode.Read); + using var inputReference = input.CreateReference(); + ((UnsafeNative.IMemoryBufferByteAccess)inputReference).GetBuffer(out byte* imageData, out uint size); + + // Debug.Assert(size == sharedImage.Resource.Size); + sharedImage.Resource.CopyFrom((IntPtr)imageData); + + // Post image stream + imageStream.Post(sharedImage, originatingTime); + } + } + }; + } + + /// + /// Extracts the camera intrinsics from the supplied frame. + /// + /// The frame from which to extract the camera intrinsics. + /// The camera intrinsics. 
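+ // The Windows.Media camera intrinsics expose three radial and two tangential distortion coefficients;
+ // these are copied into the 6-element radial and 2-element tangential vectors passed to the \psi
+ // CameraIntrinsics constructor, with the remaining radial coefficients left at zero.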
+ private CameraIntrinsics GetCameraIntrinsics(MediaFrameReference frame) + { + var intrinsics = frame.VideoMediaFrame.CameraIntrinsics; + + var transform = Matrix.Build.Dense(3, 3); + transform[0, 0] = intrinsics.FocalLength.X; + transform[1, 1] = intrinsics.FocalLength.Y; + transform[0, 2] = intrinsics.PrincipalPoint.X; + transform[1, 2] = intrinsics.PrincipalPoint.Y; + transform[2, 2] = 1; + + var radialDistortion = Vector.Build.Dense(6, 0); + radialDistortion[0] = intrinsics.RadialDistortion.X; + radialDistortion[1] = intrinsics.RadialDistortion.Y; + radialDistortion[2] = intrinsics.RadialDistortion.Z; + + var tangentialDistortion = Vector.Build.Dense(2, 0); + tangentialDistortion[0] = intrinsics.TangentialDistortion.X; + tangentialDistortion[1] = intrinsics.TangentialDistortion.Y; + + return new CameraIntrinsics( + (int)intrinsics.ImageWidth, + (int)intrinsics.ImageHeight, + transform, + radialDistortion, + tangentialDistortion); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/PhotoVideoCameraConfiguration.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/PhotoVideoCameraConfiguration.cs new file mode 100644 index 000000000..9997764ad --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/PhotoVideoCameraConfiguration.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + /// + /// Configuration for the component. + /// + public class PhotoVideoCameraConfiguration + { + /// + /// Gets or sets the settings for the stream, or null to omit. + /// + public StreamSettings VideoStreamSettings { get; set; } = new (); // use defaults + + /// + /// Gets or sets the settings for the stream, or null to omit. + /// + public StreamSettings PreviewStreamSettings { get; set; } = null; + + /// + /// Defines the capture settings for the Video or Preview streams. + /// + /// + /// Valid capture profiles for HoloLens2 are as follows. + /// + /// 2272x1278 (15,30fps, Video, Preview) + /// 1952x1100 (15,30fps, Video, Preview) + /// 1920x1080 (15,30fps, Video, Preview) + /// 1504x846 (15,30fps, Video, Preview) + /// 1280x720 (15,30fps, Video, Preview) + /// 1128x636 (15,30fps, Video only) + /// 960x540 (15,30fps, Video only) + /// 896x504 (15,30fps, Video, Preview) + /// 760x428 (15,30fps, Video only) + /// 640x360 (15,30fps, Video only) + /// 500x282 (15,30fps, Video only) + /// 424x240 (15,30fps, Video only) + /// + /// For more info, + /// see https://docs.microsoft.com/en-us/windows/mixed-reality/develop/platform-capabilities-and-apis/locatable-camera#hololens-2. + /// + /// If capturing both Video and Preview streams, the selected capture settings must be supported in the same camera profile. + /// Each stream represents a virtual camera in the camera profile and therefore each has its own Instrinsics and Pose streams. + /// For the HoloLens, since the Video and Preview streams both ultimately originate from the PV camera, the data on the Pose + /// streams will be identical, representing the PV camera pose. It is therefore only necessary to capture one of the Pose + /// streams when both Video and Preview capture are enabled. The Intrinsics may be different if the capture resolutions are + /// different. You may configure whether or not to emit the Pose and/or Intrinsics stream on the Video and Preview streams + /// by setting the respective OutputPose and OutputIntrinsics configuration parameter. 
+ /// + public class StreamSettings + { + /// + /// Initializes a new instance of the class. + /// + public StreamSettings() + { + } + + /// + /// Gets or sets the capture frame rate. + /// + public int FrameRate { get; set; } = 15; + + /// + /// Gets or sets the capture image width. + /// + public int ImageWidth { get; set; } = 1280; + + /// + /// Gets or sets the capture image height. + /// + public int ImageHeight { get; set; } = 720; + + /// + /// Gets or sets a value indicating whether the camera intrinsics are emitted. + /// + public bool OutputIntrinsics { get; set; } = true; + + /// + /// Gets or sets a value indicating whether the camera pose is emitted. + /// + public bool OutputPose { get; set; } = true; + + /// + /// Gets or sets the settings for mixed reality capture, or null to omit holograms on this stream. + /// + public MixedRealityCaptureVideoEffect MixedRealityCapture { get; set; } = null; + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Properties/AssemblyInfo.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Properties/AssemblyInfo.cs new file mode 100644 index 000000000..3fab76572 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Properties/AssemblyInfo.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Microsoft.Psi.MixedReality.UniversalWindows")] +[assembly: AssemblyDescription("Provides mixed reality components for the HoloLens 2.")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("Microsoft Corporation")] +[assembly: AssemblyProduct("Microsoft.Psi.MixedReality.UniversalWindows")] +[assembly: AssemblyCopyright("© Microsoft Corporation. 
All rights reserved.")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("0.16.92.1")] +[assembly: AssemblyFileVersion("0.16.92.1")] +[assembly: AssemblyInformationalVersion("0.16.92.1-beta")] +[assembly: ComVisible(false)] diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Properties/Microsoft.Psi.MixedReality.UniversalWindows.rd.xml b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Properties/Microsoft.Psi.MixedReality.UniversalWindows.rd.xml new file mode 100644 index 000000000..7c5ab979a --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/Properties/Microsoft.Psi.MixedReality.UniversalWindows.rd.xml @@ -0,0 +1,33 @@ + + + + + + + + + diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ResearchModeCamera.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ResearchModeCamera.cs new file mode 100644 index 000000000..6e3df855d --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ResearchModeCamera.cs @@ -0,0 +1,321 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.Collections.Generic; + using System.Numerics; + using System.Threading; + using System.Threading.Tasks; + using HoloLens2ResearchMode; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi; + using Microsoft.Psi.Calibration; + using Microsoft.Psi.Components; + using Windows.Foundation; + using Windows.Perception.Spatial; + using Windows.Perception.Spatial.Preview; + + /// + /// Represents an abstract base class for a HoloLens 2 research mode camera component. + /// + public abstract class ResearchModeCamera : ISourceComponent + { + // Camera coordinate system (x - right, y - down, z - forward) relative + // to the HoloLens coordinate system (x - right, y - up, z - back) + private static readonly CoordinateSystem CameraCoordinateSystem = + new (default, UnitVector3D.XAxis, UnitVector3D.YAxis.Negate(), UnitVector3D.ZAxis.Negate()); + + private readonly Pipeline pipeline; + private readonly ResearchModeSensorDevice sensorDevice; + private readonly ResearchModeCameraSensor cameraSensor; + private readonly Task requestCameraAccessTask; + private readonly SpatialLocator rigNodeLocator; + private readonly bool createCalibrationMap; + private readonly bool computeCameraIntrinsics; + + private CalibrationPointsMap calibrationPointsMap; + private ICameraIntrinsics cameraIntrinsics; + private CoordinateSystem cameraExtrinsics; + private Thread captureThread; + private bool shutdown; + +#if DEBUG + private DateTime previousFrameOriginatingTime; + private int outOfOrderFrameCount; +#endif + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// The research mode sensor type. + /// A value indicating whether to create a map of calibration points (needed to compute intrinsics). + /// A value indicating whether to compute camera intrinsics. 
+ public ResearchModeCamera(Pipeline pipeline, ResearchModeSensorType sensorType, bool createCalibrationMap = true, bool computeCameraIntrinsics = true) + { + this.pipeline = pipeline; + this.createCalibrationMap = createCalibrationMap; + this.computeCameraIntrinsics = computeCameraIntrinsics; + + this.Pose = pipeline.CreateEmitter(this, nameof(this.Pose)); + + if (this.computeCameraIntrinsics) + { + this.CameraIntrinsics = pipeline.CreateEmitter(this, nameof(this.CameraIntrinsics)); + } + + if (this.createCalibrationMap) + { + this.CalibrationPointsMap = pipeline.CreateEmitter(this, nameof(this.CalibrationPointsMap)); + } + + this.sensorDevice = new ResearchModeSensorDevice(); + this.requestCameraAccessTask = this.sensorDevice.RequestCameraAccessAsync().AsTask(); + this.cameraSensor = (ResearchModeCameraSensor)this.sensorDevice.GetSensor(sensorType); + + Guid rigNodeGuid = this.sensorDevice.GetRigNodeId(); + this.rigNodeLocator = SpatialGraphInteropPreview.CreateLocatorForNode(rigNodeGuid); + +#if DEBUG + // Debug stream to track out-of-order frames which occasionally occur + this.DebugOutOfOrderFrames = pipeline.CreateEmitter(this, nameof(this.DebugOutOfOrderFrames)); +#endif + } + + /// + /// Gets the camera pose stream. + /// + public Emitter Pose { get; } + + /// + /// Gets the camera intrinsics stream. + /// + public Emitter CameraIntrinsics { get; } + + /// + /// Gets the stream for calibration map (image points and corresponding 3D camera points). + /// + public Emitter CalibrationPointsMap { get; } + + /// + /// Gets the stream on which the count of out of order frames are posted. + /// +#if DEBUG + public Emitter DebugOutOfOrderFrames { get; } +#else + public Emitter DebugOutOfOrderFrames { get; } = null; // DEBUG builds only +#endif + + /// + /// Gets the rig node locator. + /// + protected SpatialLocator RigNodeLocator => this.rigNodeLocator; + + /// + public void Start(Action notifyCompletionTime) + { + var consent = this.requestCameraAccessTask.Result; + this.CheckConsentAndThrow(consent); + + // notify that this is an infinite source component + notifyCompletionTime(DateTime.MaxValue); + + this.captureThread = new Thread(this.CaptureThread); + this.captureThread.Start(); + } + + /// + public void Stop(DateTime finalOriginatingTime, Action notifyCompleted) + { + this.shutdown = true; + this.captureThread.Join(5000); + + notifyCompleted(); + } + + /// + /// Processes a sensor frame received from the sensor. + /// + /// The sensor frame. + /// The resolution of the sensor frame. + /// The sensor frame ticks. + /// The originating time for the sensor frame. + protected abstract void ProcessSensorFrame(IResearchModeSensorFrame sensorFrame, ResearchModeSensorResolution resolution, ulong frameTicks, DateTime originatingTime); + + /// + /// Gets the camera intrinsics. + /// + /// The camera's intrinsics. + protected ICameraIntrinsics GetCameraIntrinsics() => this.cameraIntrinsics; + + /// + /// Gets the calibration points map (used for computing intrinsics)). + /// + /// The calibration points map. + protected CalibrationPointsMap GetCalibrationPointsMap() => this.calibrationPointsMap; + + /// + /// Converts the rig node location to the camera pose. + /// + /// The rig node location. + /// The coordinate system representing the camera pose. 
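A minimal sketch of a concrete camera derived from the abstract ResearchModeCamera above; the class name is hypothetical, and the real derived components in this patch are DepthCamera and VisibleLightCamera (the latter appears further below). It assumes the using directives of this file.

    // Hypothetical minimal subclass, for illustration only.
    public class MinimalResearchModeCamera : ResearchModeCamera
    {
        public MinimalResearchModeCamera(Pipeline pipeline)
            : base(pipeline, ResearchModeSensorType.LeftFront, createCalibrationMap: true, computeCameraIntrinsics: true)
        {
        }

        protected override void ProcessSensorFrame(
            IResearchModeSensorFrame sensorFrame,
            ResearchModeSensorResolution resolution,
            ulong frameTicks,
            DateTime originatingTime)
        {
            // A concrete sensor converts the raw buffer into \psi messages here and posts them
            // with the supplied originatingTime. The Pose, CameraIntrinsics and CalibrationPointsMap
            // emitters declared on the base class are available for posting as configured
            // (see VisibleLightCamera later in this patch for a full implementation).
        }
    }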
+ protected CoordinateSystem ToCameraPose(SpatialLocation rigNodeLocation) + { + var q = rigNodeLocation.Orientation; + var m = Matrix4x4.CreateFromQuaternion(q); + var p = rigNodeLocation.Position; + m.Translation = p; + + // Extrinsics of the camera relative to the rig node + this.cameraExtrinsics ??= new CoordinateSystem(this.cameraSensor.GetCameraExtrinsicsMatrix().ToMathNetMatrix()); + + // Transform the rig node location to camera pose in world coordinates + var cameraPose = m.ToMathNetMatrix() * this.cameraExtrinsics.Invert() * CameraCoordinateSystem; + + // Convert to \psi basis + return new CoordinateSystem(cameraPose.ChangeBasisHoloLensToPsi()); + } + + private void CaptureThread() + { + // ResearchMode requires that OpenStream() and GetNextBuffer() are called from the same thread + this.cameraSensor.OpenStream(); + + try + { + if (this.createCalibrationMap || this.computeCameraIntrinsics) + { + // Get the resolution from the initial frame. We could also just have used constants + // based on the sensor type, but this approach keeps things more general/flexible. + var sensorFrame = this.cameraSensor.GetNextBuffer(); + var resolution = sensorFrame.GetResolution(); + var width = (int)resolution.Width; + var height = (int)resolution.Height; + + // Compute a lookup table of calibration points + List cameraPoints = new (); + List imagePoints = new (); + float[] cameraUnitPlanePoints = new float[width * height * 2]; + + int ci = 0; + for (int y = 0; y < height; y++) + { + for (int x = 0; x < width; x++) + { + // Check the return value for success (HRESULT == S_OK) + if (this.cameraSensor.MapImagePointToCameraUnitPlane(new Point(x + 0.5, y + 0.5), out var xy) == 0) + { + // Add the camera space mapping for the image pixel + cameraUnitPlanePoints[ci++] = (float)xy.X; + cameraUnitPlanePoints[ci++] = (float)xy.Y; + + var norm = Math.Sqrt((xy.X * xy.X) + (xy.Y * xy.Y) + 1.0); + imagePoints.Add(new Point2D(x + 0.5, y + 0.5)); + cameraPoints.Add(new Point3D(xy.X / norm, xy.Y / norm, 1.0 / norm)); + } + else + { + cameraUnitPlanePoints[ci++] = float.NaN; + cameraUnitPlanePoints[ci++] = float.NaN; + } + } + } + + this.calibrationPointsMap = new CalibrationPointsMap(width, height, cameraUnitPlanePoints); + + if (this.computeCameraIntrinsics) + { + // Compute instrinsics before the main loop as it could take a while. This avoids a long + // observed initial delay for the first posted frame while intrinsics are being computed. 
+ this.cameraIntrinsics = this.ComputeCameraIntrinsics(width, height, cameraPoints, imagePoints); + } + } + + while (!this.shutdown) + { + var sensorFrame = this.cameraSensor.GetNextBuffer(); + var frameTicks = sensorFrame.GetTimeStamp().HostTicks; + var resolution = sensorFrame.GetResolution(); + + int imageWidth = (int)resolution.Width; + int imageHeight = (int)resolution.Height; + var originatingTime = this.pipeline.GetCurrentTimeFromElapsedTicks((long)frameTicks); + +#if DEBUG + if (originatingTime <= this.previousFrameOriginatingTime) + { + System.Diagnostics.Trace.WriteLine($"Attempted to post out of order message with originating time {originatingTime.TimeOfDay} from {this.GetType().Name}"); + + // Post the total number of out-of-order frames received on the debug stream + this.DebugOutOfOrderFrames.Post(++this.outOfOrderFrameCount, originatingTime); + + // Continue to the next frame + continue; + } + + this.previousFrameOriginatingTime = originatingTime; +#endif + + // Sensor-specific processing implemented by derived class + if (!this.shutdown) + { + this.ProcessSensorFrame(sensorFrame, resolution, frameTicks, originatingTime); + } + } + } + finally + { + this.cameraSensor.CloseStream(); + } + } + + private void CheckConsentAndThrow(ResearchModeSensorConsent consent) + { + switch (consent) + { + case ResearchModeSensorConsent.Allowed: + return; + case ResearchModeSensorConsent.DeniedBySystem: + throw new UnauthorizedAccessException("Access to the camera was denied by the system"); + case ResearchModeSensorConsent.DeniedByUser: + throw new UnauthorizedAccessException("Access to the camera was denied by the user"); + case ResearchModeSensorConsent.NotDeclaredByApp: + throw new UnauthorizedAccessException("Camera capability was not declared in the app manifest"); + case ResearchModeSensorConsent.UserPromptRequired: + throw new UnauthorizedAccessException("Permission to access to the camera must be requested first"); + } + } + + /// + /// Computes the camera intrinsics from a lookup table mapping image points to 3D points in camera space. + /// + /// The image width for the camera. + /// The image height for the camera. + /// The list of 3D camera points to use for calibration. + /// The list of corresponding 2D image points. + /// The camera intrinsics. 
+ private ICameraIntrinsics ComputeCameraIntrinsics(int width, int height, List cameraPoints, List imagePoints) + { + // Initialize a starting camera matrix + var initialCameraMatrix = MathNet.Numerics.LinearAlgebra.Matrix.Build.Dense(3, 3); + var initialDistortion = MathNet.Numerics.LinearAlgebra.Vector.Build.Dense(2); + initialCameraMatrix[0, 0] = 250; // fx + initialCameraMatrix[1, 1] = 250; // fy + initialCameraMatrix[0, 2] = width / 2.0; // cx + initialCameraMatrix[1, 2] = height / 2.0; // cy + initialCameraMatrix[2, 2] = 1; + CalibrationExtensions.CalibrateCameraIntrinsics( + cameraPoints, + imagePoints, + initialCameraMatrix, + initialDistortion, + out var computedCameraMatrix, + out var computedDistortionCoefficients, + false); + + return new CameraIntrinsics(width, height, computedCameraMatrix, computedDistortionCoefficients, depthPixelSemantics: DepthPixelSemantics.DistanceToPoint); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ResearchModeImu.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ResearchModeImu.cs new file mode 100644 index 000000000..b235beea4 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/ResearchModeImu.cs @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + using HoloLens2ResearchMode; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi; + using Microsoft.Psi.Components; + + /// + /// Implements an abstract base class for a HoloLens 2 research mode IMU component. + /// + public abstract class ResearchModeImu : IProducer<(Vector3D Sample, DateTime OriginatingTime)[]>, ISourceComponent + { + private readonly ResearchModeSensorDevice sensorDevice; + private readonly ResearchModeImuSensor imuSensor; + private readonly Task requestImuAccessTask; + + private Thread captureThread; + private bool shutdown; + private DateTime lastSampleOriginatingTime = DateTime.MinValue; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// The research mode sensor type. + public ResearchModeImu(Pipeline pipeline, ResearchModeSensorType sensorType) + { + this.Pipeline = pipeline; + this.sensorDevice = new ResearchModeSensorDevice(); + this.requestImuAccessTask = this.sensorDevice.RequestIMUAccessAsync().AsTask(); + this.imuSensor = (ResearchModeImuSensor)this.sensorDevice.GetSensor(sensorType); + this.Out = pipeline.CreateEmitter<(Vector3D Sample, DateTime OriginatingTime)[]>(this, nameof(this.Out)); + } + + /// + /// Gets the IMU stream. + /// + public Emitter<(Vector3D Sample, DateTime OriginatingTime)[]> Out { get; } + + /// + /// Gets the pipeline to which this component belongs. + /// + protected Pipeline Pipeline { get; } + + /// + public void Start(Action notifyCompletionTime) + { + var consent = this.requestImuAccessTask.Result; + this.CheckConsentAndThrow(consent); + + // notify that this is an infinite source component + notifyCompletionTime(DateTime.MaxValue); + + this.captureThread = new Thread(this.CaptureThread); + this.captureThread.Start(); + } + + /// + public void Stop(DateTime finalOriginatingTime, Action notifyCompleted) + { + this.shutdown = true; + this.captureThread.Join(5000); + + notifyCompleted(); + } + + /// + /// Processes a sensor frame received from the sensor. + /// + /// The sensor frame. 
+ protected abstract void ProcessSensorFrame(IResearchModeSensorFrame sensorFrame); + + /// + /// Post sensor frame samples. + /// + /// Type of sensor sample. + /// Sensor frame containing samples. + /// Sensor samples. + /// Function mapping sample to float tuple value. + /// Function getting sample nanoseconds. + protected void PostSamples(IResearchModeSensorFrame sensorFrame, T[] samples, Func toValueFn, Func toNanos) + { + var frameTicks = sensorFrame.GetTimeStamp().HostTicks; + var frameSamples = samples + .Select(sample => + { + var val = toValueFn(sample); + var sensorTicks = (toNanos(sample) - toNanos(samples[0])) / 100; // nanoseconds to ticks + var sampleOriginatingTime = this.Pipeline.GetCurrentTimeFromElapsedTicks((long)(frameTicks + sensorTicks)); + return (new Vector3D(-val.Z, -val.X, val.Y) /* \psi basis */, sampleOriginatingTime); + }) + .Where(sample => sample.sampleOriginatingTime > this.lastSampleOriginatingTime) + .ToArray(); + this.lastSampleOriginatingTime = frameSamples.Last().sampleOriginatingTime; + var frameOriginatingTime = this.Pipeline.GetCurrentTimeFromElapsedTicks((long)frameTicks); + this.Out.Post(frameSamples, frameOriginatingTime); + } + + private void CaptureThread() + { + // ResearchMode requires that OpenStream() and GetNextBuffer() are called from the same thread + this.imuSensor.OpenStream(); + + try + { + while (!this.shutdown) + { + var sensorFrame = this.imuSensor.GetNextBuffer(); + if (!this.shutdown) + { + this.ProcessSensorFrame(sensorFrame); + } + } + } + finally + { + this.imuSensor.CloseStream(); + } + } + + private void CheckConsentAndThrow(ResearchModeSensorConsent consent) + { + switch (consent) + { + case ResearchModeSensorConsent.Allowed: + return; + case ResearchModeSensorConsent.DeniedBySystem: + throw new UnauthorizedAccessException("Access to the IMU was denied by the system"); + case ResearchModeSensorConsent.DeniedByUser: + throw new UnauthorizedAccessException("Access to the IMU was denied by the user"); + case ResearchModeSensorConsent.NotDeclaredByApp: + throw new UnauthorizedAccessException("IMU capability was not declared in the app manifest (DeviceCapability backgroundSpatialPerception)"); + case ResearchModeSensorConsent.UserPromptRequired: + throw new UnauthorizedAccessException("Permission to access to the IMU must be requested first"); + } + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SceneUnderstanding.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SceneUnderstanding.cs new file mode 100644 index 000000000..62e49e5dd --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SceneUnderstanding.cs @@ -0,0 +1,243 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.Collections.Generic; + using System.Linq; + using System.Numerics; + using MathNet.Spatial.Euclidean; + using Microsoft.MixedReality.SceneUnderstanding; + using Microsoft.Psi.Components; + using Microsoft.Psi.Spatial.Euclidean; + using StereoKit; + using StereoKit.Framework; + using Windows.Perception.Spatial.Preview; + + /// + /// Component representing perceived scene understanding. 
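A sketch of how a concrete IMU component might call the PostSamples helper above. The generic parameters of PostSamples are elided in this rendering of the patch, and the sensor type name, frame accessor, and per-sample timestamp field below are assumptions made for illustration; the real derived components (Accelerometer, Gyroscope, Magnetometer) are added elsewhere in this patch.

    // Hypothetical subclass for illustration; accessor and field names are assumptions.
    public class MinimalGyroscope : ResearchModeImu
    {
        public MinimalGyroscope(Pipeline pipeline)
            : base(pipeline, ResearchModeSensorType.ImuGyro) // sensor type name is an assumption
        {
        }

        protected override void ProcessSensorFrame(IResearchModeSensorFrame sensorFrame)
        {
            var gyroFrame = sensorFrame as ResearchModeGyroFrame;  // wrapper type added by this patch
            var samples = gyroFrame.GetCalibratedGyroSamples();    // assumed accessor

            // PostSamples converts each sample to the \psi basis and assigns it an originating time
            // computed from the frame's host ticks plus the sample's offset (nanoseconds / 100).
            this.PostSamples(
                sensorFrame,
                samples,
                sample => (sample.X, sample.Y, sample.Z),          // device-basis value
                sample => sample.Timestamp);                       // assumed per-sample nanosecond timestamp
        }
    }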
+ /// + public class SceneUnderstanding : Generator, IProducer, IStepper, IDisposable + { + private readonly Pipeline pipeline; + private readonly SceneUnderstandingConfiguration configuration; + + private CoordinateSystem scenePoseInWorld; + private Scene scene = null; + private (double Width, double Height) placementRectangleSize = (0, 0); + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// The configuration for the component. + public SceneUnderstanding(Pipeline pipeline, SceneUnderstandingConfiguration configuration = null) + : base(pipeline, true) + { + this.pipeline = pipeline; + + // requires Spatial Perception capability + if (!SceneObserver.IsSupported()) + { + throw new Exception("SceneObserver is not supported."); + } + + this.configuration = configuration ??= new (); + this.placementRectangleSize = this.configuration.InitialPlacementRectangleSize; + this.PlacementRectangleSizeInput = pipeline.CreateReceiver<(int Height, int Width)>(this, this.UpdatePlacementRectangleSize, nameof(this.PlacementRectangleSizeInput)); + this.Out = pipeline.CreateEmitter(this, nameof(this.Out)); + + // Defer call to SK.AddStepper(this) to PipelineRun to ensure derived classes have finished construction! + // Otherwise IStepper.Initialize() could get called before this object is fully constructed. + pipeline.PipelineRun += (_, _) => + { + if (SK.AddStepper(this) == default) + { + throw new Exception($"Unable to add {this} as a Stepper to StereoKit."); + } + }; + } + + /// + /// Gets stream of placement size. + /// + public Receiver<(int Width, int Height)> PlacementRectangleSizeInput { get; private set; } + + /// + /// Gets the stream of scene understanding. + /// + public Emitter Out { get; private set; } + + /// + public bool Enabled => true; + + /// + public virtual bool Initialize() + { + return true; + } + + /// + public virtual void Step() + { + } + + /// + public virtual void Shutdown() + { + } + + /// + public void Dispose() + { + this.scene?.Dispose(); + } + + /// + protected override DateTime GenerateNext(DateTime currentTime) + { + SceneObserverAccessStatus accessStatus = SceneObserver.RequestAccessAsync().GetAwaiter().GetResult(); + if (accessStatus != SceneObserverAccessStatus.Allowed) + { + throw new Exception($"SceneObserver access denied: {accessStatus}"); + } + + // Initialize a new Scene + if (this.scene == null) + { + this.scene = SceneObserver.ComputeAsync(this.configuration.SceneQuerySettings, (float)this.configuration.QueryRadius).GetAwaiter().GetResult(); + } + else + { + var lastScene = this.scene; + this.scene = SceneObserver.ComputeAsync(this.configuration.SceneQuerySettings, (float)this.configuration.QueryRadius, this.scene).GetAwaiter().GetResult(); + lastScene.Dispose(); + } + + // Get the transform to convert from scene understanding coordinates to world coordinates + var sceneSpatialCoordinateSystem = SpatialGraphInteropPreview.CreateCoordinateSystemForNode(this.scene.OriginSpatialGraphNodeId); + this.scenePoseInWorld = sceneSpatialCoordinateSystem.TryConvertSpatialCoordinateSystemToPsiCoordinateSystem() ?? 
new CoordinateSystem(); + + List GetMeshes(IEnumerable sceneObjects, Func> selector) + { + return sceneObjects.SelectMany( + obj => obj.Meshes.Select( + m => ToMesh3D(m, this.GetWorldPose(obj)))).ToList(); + } + + List GetRectangles(IEnumerable sceneObjects) + { + Rectangle3D QuadToRectangle(SceneQuad quad, CoordinateSystem rectanglePose) + { + var w = quad.Extents.X; + var h = quad.Extents.Y; + + return new Rectangle3D( + rectanglePose.Origin, + rectanglePose.YAxis.Negate().Normalize(), + rectanglePose.ZAxis.Normalize(), + -w / 2, + -h / 2, + w, + h); + } + + return sceneObjects.SelectMany( + obj => obj.Quads.Select( + q => QuadToRectangle(q, this.GetWorldPose(obj)))).ToList(); + } + + List GetPlacementRectangles(IEnumerable sceneObjects) + { + if (this.configuration.ComputePlacementRectangles && this.placementRectangleSize.Width > 0 && this.placementRectangleSize.Height > 0) + { + Rectangle3D? QuadToPlacementRectangle(SceneQuad quad, CoordinateSystem rectanglePose) + { + var w = this.placementRectangleSize.Width; + var h = this.placementRectangleSize.Height; + if (!quad.FindCentermostPlacement(new Vector2((float)w, (float)h), out var placement)) + { + return null; // no placement found + } + + // origin is top-left of quad plane, so shift to be relative to the centroid (in 3D) + var placementFromCenter = new Vec3(placement.X - (quad.Extents.X / 2f), placement.Y - (quad.Extents.Y / 2f), 0); + + return new Rectangle3D( + rectanglePose.Transform(placementFromCenter.ToPoint3D(false)), + rectanglePose.YAxis.Negate().Normalize(), + rectanglePose.ZAxis.Normalize(), + -w / 2, + -h / 2, + w, + h); + } + + return sceneObjects.SelectMany( + obj => obj.Quads.Select( + q => QuadToPlacementRectangle(q, this.GetWorldPose(obj)))).ToList(); + } + else + { + return new List(0); + } + } + + var scene = new SceneObjectCollection(); + + foreach (var group in this.scene.SceneObjects.GroupBy(o => o.Kind)) + { + Action setter = group.Key switch + { + SceneObjectKind.Background => x => scene.Background = x, + SceneObjectKind.Ceiling => x => scene.Ceiling = x, + SceneObjectKind.CompletelyInferred => x => scene.Inferred = x, + SceneObjectKind.Floor => x => scene.Floor = x, + SceneObjectKind.Platform => x => scene.Platform = x, + SceneObjectKind.Unknown => x => scene.Unknown = x, + SceneObjectKind.Wall => x => scene.Wall = x, + SceneObjectKind.World => x => scene.World = x, + _ => throw new Exception($"Unexpected scene object kind: {group.Key}"), + }; + + setter(new SceneObjectCollection.SceneObject( + GetMeshes(group, x => x.Meshes), + GetMeshes(group, x => x.ColliderMeshes), + GetRectangles(group), + GetPlacementRectangles(group))); + } + + this.Out.Post(scene, currentTime); + + // Since acquiring the scene understanding information may take a long time, + // if the normal scheduled time is behind the pipeline time, use the + // current pipeline time. + var scheduledTime = currentTime + this.configuration.MinQueryInterval; + var currentPipelineTime = this.pipeline.GetCurrentTime(); + return (currentPipelineTime > scheduledTime) ? 
currentPipelineTime : scheduledTime; + } + + private static Mesh3D ToMesh3D(SceneMesh mesh, CoordinateSystem meshPose) + { + var vertices = new Vector3[mesh.VertexCount]; + var indices = new uint[mesh.TriangleIndexCount]; + + mesh.GetVertexPositions(vertices); + mesh.GetTriangleIndices(indices); + + return new Mesh3D(vertices.Select(v => meshPose.Transform(v.ToPoint3D(false))).ToArray(), indices); + } + + private void UpdatePlacementRectangleSize((int Width, int Height) size) + { + this.placementRectangleSize = size; + } + + private CoordinateSystem GetWorldPose(SceneObject sceneObject) + { + var posePsiBasis = new CoordinateSystem(sceneObject.GetLocationAsMatrix().ToMathNetMatrix().ChangeBasisHoloLensToPsi()); + return posePsiBasis.TransformBy(this.scenePoseInWorld); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SceneUnderstandingConfiguration.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SceneUnderstandingConfiguration.cs new file mode 100644 index 000000000..253f97290 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SceneUnderstandingConfiguration.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using Microsoft.MixedReality.SceneUnderstanding; + + /// + /// The configuration for the component. + /// + public class SceneUnderstandingConfiguration + { + /// + /// Gets or sets the minimum time interval at which to query for scene understanding. + /// + public TimeSpan MinQueryInterval { get; set; } = TimeSpan.FromSeconds(1); + + /// + /// Gets or sets the query radius (meters). + /// + public double QueryRadius { get; set; } = 10; + + /// + /// Gets or sets a value indicating whether to enable computation of placement rectangles. + /// + public bool ComputePlacementRectangles { get; set; } = false; + + /// + /// Gets or sets the initial size (in meters) of placement rectangles. + /// + public (double Width, double Height) InitialPlacementRectangleSize { get; set; } = (0, 0); + + /// + /// Gets or sets the scene query settings. + /// + public SceneQuerySettings SceneQuerySettings { get; set; } = new SceneQuerySettings() + { + EnableSceneObjectMeshes = true, + EnableSceneObjectQuads = true, + EnableWorldMesh = true, + EnableOnlyObservedSceneObjects = false, + RequestedMeshLevelOfDetail = SceneMeshLevelOfDetail.Unlimited, + }; + } +} \ No newline at end of file diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SpatialAnchorHelper.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SpatialAnchorHelper.cs new file mode 100644 index 000000000..be4b845fd --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SpatialAnchorHelper.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System.Collections.Concurrent; + using System.Collections.Generic; + using System.Linq; + using MathNet.Spatial.Euclidean; + using Windows.Perception.Spatial; + + /// + /// Represents a helper for spatial anchor operations. + /// + public class SpatialAnchorHelper + { + private readonly SpatialAnchorStore spatialAnchorStore; + private readonly ConcurrentDictionary spatialAnchors = new (); + + /// + /// Initializes a new instance of the class. + /// + /// The spatial anchor store. 
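A minimal sketch of constructing the SceneUnderstanding component above with a non-default configuration that also computes 0.5 m x 0.5 m placement rectangles. It assumes StereoKit has already been initialized, since the component registers itself as an IStepper on pipeline run; Do is the standard \psi stream operator.

    using var pipeline = Pipeline.Create();

    var sceneUnderstanding = new SceneUnderstanding(
        pipeline,
        new SceneUnderstandingConfiguration
        {
            MinQueryInterval = TimeSpan.FromSeconds(5),
            QueryRadius = 5,
            ComputePlacementRectangles = true,
            InitialPlacementRectangleSize = (0.5, 0.5),
        });

    // The output groups meshes, rectangles and placement rectangles by object kind
    // (Floor, Wall, Ceiling, Platform, ...), as assembled in GenerateNext above.
    sceneUnderstanding.Out.Do(scene =>
    {
        var floor = scene.Floor;
        var walls = scene.Wall;
    });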
+ public SpatialAnchorHelper(SpatialAnchorStore spatialAnchorStore) + { + this.spatialAnchorStore = spatialAnchorStore; + var persistedAnchors = spatialAnchorStore.GetAllSavedAnchors(); + this.spatialAnchors = new ConcurrentDictionary(persistedAnchors); + } + + /// + /// Creates a persisted spatial anchor at the supplied . + /// + /// The identifier of the spatial anchor. + /// The coordinate system at which to create the spatial anchor. + /// The new spatial anchor, or null if the creation failed. + public SpatialAnchor TryCreateSpatialAnchor(string id, SpatialCoordinateSystem spatialCoordinateSystem) + { + // SpatialAnchor.TryCreateRelativeTo could return null if either the maximum number of + // spatial anchors has been reached, or if the world coordinate system could not be located. + var spatialAnchor = SpatialAnchor.TryCreateRelativeTo(spatialCoordinateSystem); + + if (spatialAnchor != null) + { + // Try to persist the spatial anchor to the store + if (this.spatialAnchorStore.TrySave(id, spatialAnchor)) + { + // Save it in the in-memory dictionary of spatial anchors + this.spatialAnchors[id] = spatialAnchor; + } + else + { + spatialAnchor = null; + } + } + + return spatialAnchor; + } + + /// + /// Creates a persisted spatial anchor at the supplied . + /// + /// The identifier of the spatial anchor. + /// The coordinate system at which to create the spatial anchor. + /// The new spatial anchor, or null if the creation failed. + public SpatialAnchor TryCreateSpatialAnchor(string id, CoordinateSystem coordinateSystem) + { + SpatialAnchor spatialAnchor = null; + var spatialCoordinateSystem = coordinateSystem.TryConvertPsiCoordinateSystemToSpatialCoordinateSystem(); + if (spatialCoordinateSystem != null) + { + spatialAnchor = this.TryCreateSpatialAnchor(id, spatialCoordinateSystem); + } + + return spatialAnchor; + } + + /// + /// Updates the coordinate system of a persisted spatial anchor. + /// + /// The identifier of the spatial anchor. + /// The new coordinate system of the spatial anchor. + /// The updated spatial anchor, or null if the update failed. + /// + /// If the spatial anchor was not found in the store, a new one will be created. Updating + /// an existing spatial anchor with a null coordinate system will cause it to be removed. + /// In this case, the existing spatial anchor is returned. + /// + public SpatialAnchor TryUpdateSpatialAnchor(string id, CoordinateSystem coordinateSystem) + { + this.spatialAnchors.TryGetValue(id, out var spatialAnchor); + if (spatialAnchor?.CoordinateSystem.TryConvertSpatialCoordinateSystemToPsiCoordinateSystem() != coordinateSystem) + { + this.RemoveSpatialAnchor(id); + if (coordinateSystem != null) + { + spatialAnchor = this.TryCreateSpatialAnchor(id, coordinateSystem); + } + } + + return spatialAnchor; + } + + /// + /// Removes the specified spatial anchor from the store. + /// + /// The identifier of the spatial anchor to remove. + public void RemoveSpatialAnchor(string id) + { + if (this.spatialAnchors.TryRemove(id, out _)) + { + this.spatialAnchorStore.Remove(id); + } + } + + /// + /// Gets all spatial anchors in the store. + /// + /// The map of spatial anchors. + public Dictionary GetAllSpatialAnchors() + { + return this.spatialAnchors.ToDictionary(kvp => kvp.Key, kvp => kvp.Value); + } + + /// + /// Gets the coordinate systems for all spatial anchors in the store. + /// + /// The map of spatial anchor coordinate systems. 
+ public Dictionary GetAllSpatialAnchorCoordinateSystems() + { + // Spatial anchors may not always be locatable at all points in time, so the result may contain null values + return new Dictionary( + this.spatialAnchors + .Select(kvp => KeyValuePair.Create(kvp.Key, kvp.Value.CoordinateSystem.TryConvertSpatialCoordinateSystemToPsiCoordinateSystem()))); + } + + /// + /// Gets the specified spatial anchor from the store. + /// + /// The identifier of the spatial anchor. + /// The spatial anchor, or null if it was not found. + public SpatialAnchor TryGetSpatialAnchor(string id) + { + return this.spatialAnchors.TryGetValue(id, out var spatialAnchor) ? spatialAnchor : null; + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SpatialAnchorsSource.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SpatialAnchorsSource.cs new file mode 100644 index 000000000..dd2bb30a8 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/SpatialAnchorsSource.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.Collections.Generic; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi.Components; + + /// + /// Component representing a spatial anchor source. + /// + public class SpatialAnchorsSource : Generator, IProducer> + { + private readonly TimeSpan interval; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// The configuration for the component. + public SpatialAnchorsSource(Pipeline pipeline, TimeSpan interval) + : base(pipeline) + { + this.interval = interval; + this.Out = pipeline.CreateEmitter>(this, nameof(this.Out)); + this.Update = pipeline.CreateReceiver>(this, this.ReceiveUpdate, nameof(this.Update)); + } + + /// + /// Gets the stream of spatial anchor poses. + /// + public Emitter> Out { get; private set; } + + /// + /// Gets the receiver for spatial anchor updates. + /// + public Receiver> Update { get; private set; } + + /// + protected override DateTime GenerateNext(DateTime currentTime) + { + this.Out.Post(MixedReality.SpatialAnchorHelper.GetAllSpatialAnchorCoordinateSystems(), currentTime); + return currentTime + this.interval; + } + + private void ReceiveUpdate(Dictionary spatialAnchors, Envelope envelope) + { + foreach (var spatialAnchor in spatialAnchors) + { + MixedReality.SpatialAnchorHelper.TryUpdateSpatialAnchor(spatialAnchor.Key, spatialAnchor.Value); + } + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/UnsafeNative.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/UnsafeNative.cs new file mode 100644 index 000000000..081697eaa --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/UnsafeNative.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.Runtime.InteropServices; + + /// + /// Provides unsafe native APIs. + /// + public static class UnsafeNative + { + /// + /// Provides access to an IMemoryBuffer as an array of bytes. + /// + [ComImport] + [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + public unsafe interface IMemoryBufferByteAccess + { + /// + /// Gets an IMemoryBuffer as an array of bytes. 
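A minimal sketch of creating, updating, and enumerating persisted anchors with the SpatialAnchorHelper above. The helper instance is taken as a parameter; in practice it could be the static MixedReality.SpatialAnchorHelper referenced by SpatialAnchorsSource (initialized in MixedReality.cs elsewhere in this patch).

    public static void PlaceAndUpdateAnchor(SpatialAnchorHelper anchorHelper)
    {
        // Create and persist an anchor at a world pose expressed as a \psi CoordinateSystem.
        CoordinateSystem workbenchPose = new CoordinateSystem(); // placeholder pose
        var anchor = anchorHelper.TryCreateSpatialAnchor("workbench", workbenchPose);

        // Move the anchor later; updating with a null coordinate system removes it from the store.
        CoordinateSystem updatedPose = new CoordinateSystem(); // placeholder updated pose
        anchorHelper.TryUpdateSpatialAnchor("workbench", updatedPose);
        anchorHelper.TryUpdateSpatialAnchor("workbench", null);

        // Enumerate everything persisted; values may be null when an anchor is not currently locatable.
        foreach (var kvp in anchorHelper.GetAllSpatialAnchorCoordinateSystems())
        {
            System.Diagnostics.Debug.WriteLine($"{kvp.Key}: {(kvp.Value == null ? "not locatable" : "located")}");
        }
    }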
+ /// + /// A pointer to a byte array containing the buffer data. + /// The number of bytes in the returned array. + void GetBuffer(out byte* buffer, out uint capacity); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/VisibleLightCamera.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/VisibleLightCamera.cs new file mode 100644 index 000000000..b0f517ed5 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/VisibleLightCamera.cs @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.Diagnostics; + using HoloLens2ResearchMode; + using Microsoft.Psi; + using Microsoft.Psi.Imaging; + using Windows.Perception; + + /// + /// Visible light camera source component. + /// + public class VisibleLightCamera : ResearchModeCamera + { + private readonly VisibleLightCameraConfiguration configuration; + private DateTime previousOriginatingTime = DateTime.MinValue; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// The configuration for this component. + public VisibleLightCamera(Pipeline pipeline, VisibleLightCameraConfiguration configuration = null) + : base( + pipeline, + (configuration ?? new VisibleLightCameraConfiguration()).Mode, + (configuration ?? new VisibleLightCameraConfiguration()).OutputCalibrationMap, + (configuration ?? new VisibleLightCameraConfiguration()).OutputCalibration) + { + this.configuration = configuration ?? new VisibleLightCameraConfiguration(); + + if (this.configuration.Mode != ResearchModeSensorType.LeftFront && + this.configuration.Mode != ResearchModeSensorType.LeftLeft && + this.configuration.Mode != ResearchModeSensorType.RightFront && + this.configuration.Mode != ResearchModeSensorType.RightRight) + { + throw new ArgumentException($"Initializing the camera in {this.configuration.Mode} mode is not supported."); + } + + this.Image = pipeline.CreateEmitter>(this, nameof(this.Image)); + } + + /// + /// Gets the grayscale image stream. 
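A sketch of the standard WinRT interop pattern for reading raw bytes through the IMemoryBufferByteAccess interface declared above; this is presumably how the camera components in this patch copy frame data, but the helper below is illustrative only and requires compiling with /unsafe.

    using System;
    using System.Runtime.InteropServices;
    using Windows.Foundation;

    public static class MemoryBufferHelper
    {
        // Copies the contents of an IMemoryBuffer into a managed byte array.
        public static unsafe byte[] ToArray(IMemoryBuffer memoryBuffer)
        {
            using IMemoryBufferReference reference = memoryBuffer.CreateReference();

            // Query the reference for the COM byte-access interface declared above.
            ((UnsafeNative.IMemoryBufferByteAccess)reference).GetBuffer(out byte* buffer, out uint capacity);

            var result = new byte[capacity];
            Marshal.Copy((IntPtr)buffer, result, 0, (int)capacity);
            return result;
        }
    }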
+ /// + public Emitter> Image { get; } + + /// + protected override void ProcessSensorFrame(IResearchModeSensorFrame sensorFrame, ResearchModeSensorResolution resolution, ulong frameTicks, DateTime originatingTime) + { + // If we're withing the specified min frame interval, return + if ((originatingTime - this.previousOriginatingTime) <= this.configuration.MinInterframeInterval) + { + return; + } + + if (this.configuration.OutputCalibrationMap && + (originatingTime - this.CalibrationPointsMap.LastEnvelope.OriginatingTime) > this.configuration.OutputCalibrationMapInterval) + { + // Post the calibration map created at the start + this.CalibrationPointsMap.Post(this.GetCalibrationPointsMap(), originatingTime); + } + + if (this.configuration.OutputCalibration) + { + // Post the intrinsics computed at the start + this.CameraIntrinsics.Post(this.GetCameraIntrinsics(), originatingTime); + } + + if (this.configuration.OutputPose) + { + var timestamp = PerceptionTimestampHelper.FromSystemRelativeTargetTime(TimeSpan.FromTicks((long)frameTicks)); + var rigNodeLocation = this.RigNodeLocator.TryLocateAtTimestamp(timestamp, MixedReality.WorldSpatialCoordinateSystem); + + // The rig node may not always be locatable, so we need a null check + if (rigNodeLocation != null) + { + // Compute the camera pose from the rig node location + var cameraWorldPose = this.ToCameraPose(rigNodeLocation); + this.Pose.Post(cameraWorldPose, originatingTime); + } + } + + if (this.configuration.OutputImage) + { + var vlcFrame = sensorFrame as ResearchModeSensorVlcFrame; + var imageBuffer = vlcFrame.GetBuffer(); + int imageWidth = (int)resolution.Width; + int imageHeight = (int)resolution.Height; + + using var image = ImagePool.GetOrCreate(imageWidth, imageHeight, PixelFormat.Gray_8bpp); + Debug.Assert(image.Resource.Size == imageBuffer.Length * sizeof(byte), "Image size does not match raw image buffer size!"); + image.Resource.CopyFrom(imageBuffer); + this.Image.Post(image, originatingTime); + this.previousOriginatingTime = originatingTime; + } + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/VisibleLightCameraConfiguration.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/VisibleLightCameraConfiguration.cs new file mode 100644 index 000000000..be09c7418 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/VisibleLightCameraConfiguration.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using HoloLens2ResearchMode; + + /// + /// Configuration for the component. + /// + public class VisibleLightCameraConfiguration + { + /// + /// Gets or sets a value indicating whether the calibration settings are emitted. + /// + public bool OutputCalibration { get; set; } = true; + + /// + /// Gets or sets a value indicating whether the original map of points for calibration are emitted. + /// + public bool OutputCalibrationMap { get; set; } = true; + + /// + /// Gets or sets the minimum interval between posting calibration map messages. + /// + public TimeSpan OutputCalibrationMapInterval { get; set; } = TimeSpan.FromSeconds(20); + + /// + /// Gets or sets a value indicating whether the camera pose stream is emitted. + /// + public bool OutputPose { get; set; } = true; + + /// + /// Gets or sets a value indicating whether the grayscale image stream is emitted. 
+ /// + public bool OutputImage { get; set; } = true; + + /// + /// Gets or sets the sensor selection. + /// + /// Valid values are: LeftFront, LeftLeft, RightFront, RightRight. + public ResearchModeSensorType Mode { get; set; } = ResearchModeSensorType.LeftFront; + + /// + /// Gets or sets the minumum inter-frame interval. + /// + /// This value can be user to reduce the emitting framerate of the visible light camera. + public TimeSpan MinInterframeInterval { get; set; } = TimeSpan.Zero; + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/stylecop.json b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/stylecop.json new file mode 100644 index 000000000..6f09427eb --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.UniversalWindows/stylecop.json @@ -0,0 +1,16 @@ +{ + // ACTION REQUIRED: This file was automatically added to your project, but it + // will not take effect until additional steps are taken to enable it. See the + // following page for additional information: + // + // https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/EnableConfiguration.md + + "$schema": "https://raw.githubusercontent.com/DotNetAnalyzers/StyleCopAnalyzers/master/StyleCop.Analyzers/StyleCop.Analyzers/Settings/stylecop.schema.json", + "settings": { + "documentationRules": { + "companyName": "Microsoft Corporation", + "copyrightText": "Copyright (c) Microsoft Corporation. All rights reserved.\nLicensed under the MIT license.", + "xmlHeader": false + } + } +} \ No newline at end of file diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/HandVisualizationObject.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/HandVisualizationObject.cs new file mode 100644 index 000000000..6341cb524 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/HandVisualizationObject.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality.Visualization +{ + using System.Windows.Media; + using Microsoft.Psi.MixedReality; + using Microsoft.Psi.Visualization.VisualizationObjects; + + /// + /// Implements a visualization object for tracked hands. + /// + [VisualizationObject("Mixed Reality Tracked Hand")] + public class HandVisualizationObject : Point3DGraphVisualizationObject + { + /// + /// Initializes a new instance of the class. + /// + public HandVisualizationObject() + { + this.EdgeDiameterMm = 10; + this.NodeRadiusMm = 7; + this.NodeColor = Colors.Silver; + this.EdgeColor = Colors.Gray; + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/HandVisualizationObjectAdapter.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/HandVisualizationObjectAdapter.cs new file mode 100644 index 000000000..d3df90963 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/HandVisualizationObjectAdapter.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
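A minimal sketch of wiring the VisibleLightCamera above into a pipeline at a reduced frame rate and persisting its streams. The store location is illustrative; in practice the image stream would typically be encoded before writing (an ImageToJpegStreamEncoder for UWP is added elsewhere in this patch).

    using var pipeline = Pipeline.Create();
    var store = PsiStore.Create(pipeline, "VlcCapture", Windows.Storage.ApplicationData.Current.LocalFolder.Path);

    var camera = new VisibleLightCamera(
        pipeline,
        new VisibleLightCameraConfiguration
        {
            Mode = ResearchModeSensorType.LeftFront,
            MinInterframeInterval = TimeSpan.FromMilliseconds(100), // ~10 fps instead of the native rate
            OutputCalibrationMap = false,
        });

    camera.Image.Write("VLC.LeftFront.Image", store);
    camera.Pose.Write("VLC.LeftFront.Pose", store);
    camera.CameraIntrinsics.Write("VLC.LeftFront.Intrinsics", store);

    pipeline.RunAsync();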
+ +namespace Microsoft.Psi.MixedReality.Visualization +{ + using System; + using System.Collections.Generic; + using System.Linq; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi.MixedReality; + using Microsoft.Psi.Visualization.Adapters; + using Microsoft.Psi.Visualization.Data; + using Microsoft.Psi.Visualization.DataTypes; + + /// + /// Implements a stream adapter from single System.Tuple labeled rectangle into lists of System.Tuple labeled rectangles. + /// + [StreamAdapter] + public class HandVisualizationObjectAdapter : StreamAdapter> + { + private static Dictionary<(HandJointIndex Start, HandJointIndex End), bool> HandJointHierarchy { get; } = new List<(HandJointIndex, HandJointIndex)> + { + (HandJointIndex.Wrist, HandJointIndex.Palm), + + (HandJointIndex.Wrist, HandJointIndex.ThumbMetacarpal), + (HandJointIndex.ThumbMetacarpal, HandJointIndex.ThumbProximal), + (HandJointIndex.ThumbProximal, HandJointIndex.ThumbDistal), + (HandJointIndex.ThumbDistal, HandJointIndex.ThumbTip), + + (HandJointIndex.Wrist, HandJointIndex.IndexMetacarpal), + (HandJointIndex.IndexMetacarpal, HandJointIndex.IndexProximal), + (HandJointIndex.IndexProximal, HandJointIndex.IndexIntermediate), + (HandJointIndex.IndexIntermediate, HandJointIndex.IndexDistal), + (HandJointIndex.IndexDistal, HandJointIndex.IndexTip), + + (HandJointIndex.Wrist, HandJointIndex.MiddleMetacarpal), + (HandJointIndex.MiddleMetacarpal, HandJointIndex.MiddleProximal), + (HandJointIndex.MiddleProximal, HandJointIndex.MiddleIntermediate), + (HandJointIndex.MiddleIntermediate, HandJointIndex.MiddleDistal), + (HandJointIndex.MiddleDistal, HandJointIndex.MiddleTip), + + (HandJointIndex.Wrist, HandJointIndex.RingMetacarpal), + (HandJointIndex.RingMetacarpal, HandJointIndex.RingProximal), + (HandJointIndex.RingProximal, HandJointIndex.RingIntermediate), + (HandJointIndex.RingIntermediate, HandJointIndex.RingDistal), + (HandJointIndex.RingDistal, HandJointIndex.RingTip), + + (HandJointIndex.Wrist, HandJointIndex.PinkyMetacarpal), + (HandJointIndex.PinkyMetacarpal, HandJointIndex.PinkyProximal), + (HandJointIndex.PinkyProximal, HandJointIndex.PinkyIntermediate), + (HandJointIndex.PinkyIntermediate, HandJointIndex.PinkyDistal), + (HandJointIndex.PinkyDistal, HandJointIndex.PinkyTip), + }.ToDictionary(j => j, j => true); + + /// + public override Graph GetAdaptedValue(Hand source, Envelope envelope) + { + var dictionary = new Dictionary(); + if (source != null) + { + for (int jointIndex = 0; jointIndex < (int)HandJointIndex.MaxIndex; jointIndex++) + { + if (source.Joints[jointIndex] != null) + { + dictionary.Add((HandJointIndex)jointIndex, source.Joints[jointIndex].Origin); + } + } + } + + return new (dictionary, HandJointHierarchy); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/Microsoft.Psi.MixedReality.Visualization.Windows.csproj b/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/Microsoft.Psi.MixedReality.Visualization.Windows.csproj new file mode 100644 index 000000000..c1766691e --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/Microsoft.Psi.MixedReality.Visualization.Windows.csproj @@ -0,0 +1,51 @@ + + + + net472 + Microsoft.Psi.MixedReality.Visualization + Provides visualizers for types defined in Microsoft.Psi.MixedReality. 
+ + + + DEBUG;TRACE + bin\Debug\net472\Microsoft.Psi.MixedReality.Visualization.Windows.xml + true + ..\..\..\Build\Microsoft.Psi.ruleset + true + + + + + bin\Release\net472\Microsoft.Psi.MixedReality.Visualization.Windows.xml + true + ..\..\..\Build\Microsoft.Psi.ruleset + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/stylecop.json b/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/stylecop.json new file mode 100644 index 000000000..6f09427eb --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality.Visualization.Windows/stylecop.json @@ -0,0 +1,16 @@ +{ + // ACTION REQUIRED: This file was automatically added to your project, but it + // will not take effect until additional steps are taken to enable it. See the + // following page for additional information: + // + // https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/EnableConfiguration.md + + "$schema": "https://raw.githubusercontent.com/DotNetAnalyzers/StyleCopAnalyzers/master/StyleCop.Analyzers/StyleCop.Analyzers/Settings/stylecop.schema.json", + "settings": { + "documentationRules": { + "companyName": "Microsoft Corporation", + "copyrightText": "Copyright (c) Microsoft Corporation. All rights reserved.\nLicensed under the MIT license.", + "xmlHeader": false + } + } +} \ No newline at end of file diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/CalibrationPointsMap.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/CalibrationPointsMap.cs new file mode 100644 index 000000000..5f5787c26 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/CalibrationPointsMap.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + /// + /// Represents a calibration mapping of points, along with sensor width and height. Includes XY values of points + /// on the camera unit plane, one for each image pixel (corresponding to the center of the pixel). + /// These points are laid out row-wise, X then Y, repeating. + /// For the image pixel each point corresponds to, (i,j), it was sampled at + /// the center of the pixel, at position: (i+0.5, j+0.5). + /// + public readonly struct CalibrationPointsMap + { + /// + /// Gets the sensor image width. + /// + public readonly int Width; + + /// + /// Gets the sensor image height. + /// + public readonly int Height; + + /// + /// Gets the set of XY points on the camera unit plane, one for the center of each image pixel. + /// + public readonly float[] CameraUnitPlanePoints; + + /// + /// Initializes a new instance of the struct. + /// + /// The sensor image width. + /// The sensor image height. + /// The set of XY points on the camera unit plane. + /// These points are laid out row-wise, X then Y, repeating. + /// For the image pixel each point corresponds to, (i,j), it was sampled at + /// the center of the pixel, at position: (i+0.5, j+0.5). 
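A minimal sketch of recovering the unit-length camera ray for a given pixel from the map, mirroring the row-wise X,Y layout documented above and the way ResearchModeCamera fills the array earlier in this patch (two floats per pixel, rows of Width pixels). The extension method name is an assumption.

    using System;
    using MathNet.Spatial.Euclidean;
    using Microsoft.Psi.MixedReality;

    public static class CalibrationPointsMapExtensions
    {
        // Returns the unit-length ray (in camera coordinates) through pixel (x, y),
        // or null if the device reported no valid unit-plane mapping for that pixel.
        public static Point3D? TryGetCameraRay(this CalibrationPointsMap map, int x, int y)
        {
            int index = 2 * ((y * map.Width) + x);   // row-wise layout, X then Y per pixel
            float ux = map.CameraUnitPlanePoints[index];
            float uy = map.CameraUnitPlanePoints[index + 1];

            if (float.IsNaN(ux) || float.IsNaN(uy))
            {
                return null;
            }

            var norm = Math.Sqrt((ux * ux) + (uy * uy) + 1.0);
            return new Point3D(ux / norm, uy / norm, 1.0 / norm);
        }
    }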
+ public CalibrationPointsMap(int width, int height, float[] cameraUnitPlanePoints) + { + this.Width = width; + this.Height = height; + this.CameraUnitPlanePoints = cameraUnitPlanePoints; + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/EyesSensor.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/EyesSensor.cs new file mode 100644 index 000000000..dc2b1667b --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/EyesSensor.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi.Components; + using StereoKit; + + /// + /// Source component that surfaces eye tracking information on a stream. + /// + /// Applications using this component must enable the Gaze Input capability in Package.appxmanifest. + public class EyesSensor : StereoKitComponent, IProducer, ISourceComponent + { + private readonly Pipeline pipeline; + private readonly TimeSpan interval; + + private bool active; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Optional interval at which to poll eye tracking information (default 1/60th second). + public EyesSensor(Pipeline pipeline, TimeSpan interval = default) + : base(pipeline) + { + this.pipeline = pipeline; + this.interval = interval == default ? TimeSpan.Zero : interval; + this.Out = pipeline.CreateEmitter(this, nameof(this.Out)); + this.EyesTracked = pipeline.CreateEmitter(this, nameof(this.EyesTracked)); + } + + /// + /// Gets the stream of tracked eyes pose. + /// + public Emitter Out { get; private set; } + + /// + /// Gets the stream of whether eyes are currently tracked. + /// + public Emitter EyesTracked { get; private set; } + + /// + public void Start(Action notifyCompletionTime) + { + this.active = true; + notifyCompletionTime(DateTime.MaxValue); + } + + /// + public void Stop(DateTime finalOriginatingTime, Action notifyCompleted) + { + this.active = false; + notifyCompleted(); + } + + /// + public override void Step() + { + var currentTime = this.pipeline.GetCurrentTime(); + if (this.active && currentTime - this.Out.LastEnvelope.OriginatingTime >= this.interval) + { + var eyes = Input.Eyes; + var eyesTracked = Input.EyesTracked; + var originatingTime = this.pipeline.GetCurrentTime(); + this.Out.Post(eyes.ToPsiCoordinateSystem(), originatingTime); + this.EyesTracked.Post(eyesTracked.IsActive(), originatingTime); + } + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/Hand.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/Hand.cs new file mode 100644 index 000000000..cd4129cb5 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/Hand.cs @@ -0,0 +1,99 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using MathNet.Spatial.Euclidean; + using StereoKit; + + /// + /// Represents a tracked hand. + /// + public class Hand + { + /// + /// Initializes a new instance of the class. + /// + /// Whether hand is being tracked. + /// Whether fingers are pinched. + /// Whether fingers are gripped. + /// Finger joints (index by ). 
+ public Hand(bool isTracked, bool isPinched, bool isGripped, CoordinateSystem[] joints) + { + this.IsTracked = isTracked; + this.IsPinched = isPinched; + this.IsGripped = isGripped; + this.Joints = joints; + } + + /// + /// Gets a value indicating whether hand is being tracked. + /// + public bool IsTracked { get; private set; } + + /// + /// Gets a value indicating whether fingers are pitched. + /// + public bool IsPinched { get; private set; } + + /// + /// Gets a value indicating whether fingers are gripped. + /// + public bool IsGripped { get; private set; } + + /// + /// Gets finger joints in \psi basis (indexed by ). + /// + public CoordinateSystem[] Joints { get; private set; } + + /// + /// Gets the joint in \psi basis specified by a . + /// + /// The joint index. + /// The corresponding joint. + public CoordinateSystem this[HandJointIndex handJointIndex] => this.Joints[(int)handJointIndex]; + + /// + /// Constructs a object from a StereoKit hand. + /// + /// The StereoKit hand. + /// The constructed object. + public static Hand FromStereoKitHand(StereoKit.Hand hand) + { + // note: StereoKit thumbs have no Root, but \psi thumbs have no Intermediate + var joints = new CoordinateSystem[(int)HandJointIndex.MaxIndex]; + joints[(int)HandJointIndex.Palm] = hand.palm.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.Wrist] = hand.wrist.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.ThumbMetacarpal] = hand[FingerId.Thumb, JointId.KnuckleMajor].Pose.ToPsiCoordinateSystem(); // treating as proximal + joints[(int)HandJointIndex.ThumbProximal] = hand[FingerId.Thumb, JointId.KnuckleMid].Pose.ToPsiCoordinateSystem(); // treating as intermediate + joints[(int)HandJointIndex.ThumbDistal] = hand[FingerId.Thumb, JointId.KnuckleMinor].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.ThumbTip] = hand[FingerId.Thumb, JointId.Tip].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.IndexMetacarpal] = hand[FingerId.Index, JointId.Root].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.IndexProximal] = hand[FingerId.Index, JointId.KnuckleMajor].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.IndexIntermediate] = hand[FingerId.Index, JointId.KnuckleMid].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.IndexDistal] = hand[FingerId.Index, JointId.KnuckleMinor].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.IndexTip] = hand[FingerId.Index, JointId.Tip].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.MiddleMetacarpal] = hand[FingerId.Middle, JointId.Root].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.MiddleProximal] = hand[FingerId.Middle, JointId.KnuckleMajor].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.MiddleIntermediate] = hand[FingerId.Middle, JointId.KnuckleMid].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.MiddleDistal] = hand[FingerId.Middle, JointId.KnuckleMinor].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.MiddleTip] = hand[FingerId.Middle, JointId.Tip].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.RingMetacarpal] = hand[FingerId.Ring, JointId.Root].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.RingProximal] = hand[FingerId.Ring, JointId.KnuckleMajor].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.RingIntermediate] = hand[FingerId.Ring, JointId.KnuckleMid].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.RingDistal] = hand[FingerId.Ring, JointId.KnuckleMinor].Pose.ToPsiCoordinateSystem(); + 
joints[(int)HandJointIndex.RingTip] = hand[FingerId.Ring, JointId.Tip].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.PinkyMetacarpal] = hand[FingerId.Little, JointId.Root].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.PinkyProximal] = hand[FingerId.Little, JointId.KnuckleMajor].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.PinkyIntermediate] = hand[FingerId.Little, JointId.KnuckleMid].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.PinkyDistal] = hand[FingerId.Little, JointId.KnuckleMinor].Pose.ToPsiCoordinateSystem(); + joints[(int)HandJointIndex.PinkyTip] = hand[FingerId.Little, JointId.Tip].Pose.ToPsiCoordinateSystem(); + + return new Hand( + hand.IsTracked, + hand.IsPinched, + hand.IsGripped, + joints); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/HandJointIndex.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/HandJointIndex.cs new file mode 100644 index 000000000..5d5541085 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/HandJointIndex.cs @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + /// + /// Defines the enumeration of the hand joints as provided by mixed reality devices. + /// + public enum HandJointIndex + { + /// + /// The palm joint. + /// + Palm = 0, + + /// + /// The wrist joint. + /// + Wrist, + + /// + /// The thumb metacarpal joint. + /// + ThumbMetacarpal, + + /// + /// The thumb proximal joint. + /// + ThumbProximal, + + /// + /// The thumb distal joint. + /// + ThumbDistal, + + /// + /// The thumb tip joint. + /// + ThumbTip, + + /// + /// The index metacarpal joint. + /// + IndexMetacarpal, + + /// + /// The index proximal joint. + /// + IndexProximal, + + /// + /// The index intermediate joint. + /// + IndexIntermediate, + + /// + /// The index distal joint. + /// + IndexDistal, + + /// + /// The index tip joint. + /// + IndexTip, + + /// + /// The middle metacarpal joint. + /// + MiddleMetacarpal, + + /// + /// The middle proximal joint. + /// + MiddleProximal, + + /// + /// The middle intermediate joint. + /// + MiddleIntermediate, + + /// + /// The middle distal joint. + /// + MiddleDistal, + + /// + /// The middle tip joint. + /// + MiddleTip, + + /// + /// The ring metacarpal joint. + /// + RingMetacarpal, + + /// + /// The ring proximal joint. + /// + RingProximal, + + /// + /// The ring intermediate joint. + /// + RingIntermediate, + + /// + /// The ring distal joint. + /// + RingDistal, + + /// + /// The ring tip joint. + /// + RingTip, + + /// + /// The pink metacarpal joint. + /// + PinkyMetacarpal, + + /// + /// The pinky proximal joint. + /// + PinkyProximal, + + /// + /// The pinky intermediate joint. + /// + PinkyIntermediate, + + /// + /// The pinky distal joint. + /// + PinkyDistal, + + /// + /// The pinky tip joint. + /// + PinkyTip, + + /// + /// Maximum index value. + /// + MaxIndex, + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/Handle.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/Handle.cs new file mode 100644 index 000000000..86b61febf --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/Handle.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
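// Usage sketch (illustrative only): joints of a Hand are exposed in the \psi basis and can be
// retrieved through the HandJointIndex indexer. The pinch-distance computation below is an
// example, not part of the component.
var hand = Hand.FromStereoKitHand(Input.Hand(Handed.Right));
if (hand.IsTracked)
{
    var thumbTip = hand[HandJointIndex.ThumbTip].Origin;
    var indexTip = hand[HandJointIndex.IndexTip].Origin;
    var pinchDistance = thumbTip.DistanceTo(indexTip); // meters, in world coordinates
}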
+ +namespace Microsoft.Psi.MixedReality +{ + using System; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi.Components; + using StereoKit; + + /// + /// Component that represents a movable UI handle. + /// + public class Handle : StereoKitComponent, IProducer, ISourceComponent + { + private readonly Pipeline pipeline; + private readonly string id; + private readonly Bounds bounds; + private readonly bool show; + + private bool active; + private Pose pose; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Initial handle pose. + /// Handle bounds. + /// Whether to show the handle. + public Handle(Pipeline pipeline, CoordinateSystem initialPose, Vector3D bounds, bool showHandle = false) + : base(pipeline) + { + this.pipeline = pipeline; + this.id = Guid.NewGuid().ToString(); + this.pose = initialPose.ToStereoKitPose(); + this.bounds = new Bounds(new Vec3((float)bounds.Y, (float)bounds.Z, (float)bounds.X)); // psi -> SK coordinates + this.show = showHandle; + this.Out = pipeline.CreateEmitter(this, nameof(this.Out)); + } + + /// + /// Gets the stream of the handle's pose. + /// + public Emitter Out { get; private set; } + + /// + public override void Step() + { + var originatingTime = this.pipeline.GetCurrentTime(); + if (this.active) + { + UI.Handle(this.id, ref this.pose, this.bounds, this.show); + this.Out.Post(this.pose.ToPsiCoordinateSystem(), originatingTime); + } + } + + /// + public void Start(Action notifyCompletionTime) + { + this.active = true; + notifyCompletionTime(DateTime.MaxValue); + } + + /// + public void Stop(DateTime finalOriginatingTime, Action notifyCompleted) + { + this.active = false; + notifyCompleted(); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/HandsSensor.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/HandsSensor.cs new file mode 100644 index 000000000..d558f084f --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/HandsSensor.cs @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using Microsoft.Psi.Components; + using StereoKit; + + /// + /// Source component that produces streams containing information about the tracked hands. + /// + public class HandsSensor : StereoKitComponent, ISourceComponent + { + private readonly Pipeline pipeline; + private readonly TimeSpan interval; + + private bool active; + private bool visible = true; + private bool solid = true; + private Material material = Default.MaterialHand; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Optional interval at which to poll hand information (default 1/60th second). + /// Optional value indicating whether hands should be rendered (default true). + /// Optional value indicating whether hands participate in StereoKit physics (default true). + /// Optional material used to render the hands (default ). + public HandsSensor(Pipeline pipeline, TimeSpan interval = default, bool visible = true, bool solid = false, Material material = null) + : base(pipeline) + { + this.pipeline = pipeline; + this.interval = interval == default ? TimeSpan.Zero : interval; + this.visible = visible; + this.solid = solid; + this.material = material ?? 
Default.MaterialHand; + this.Left = pipeline.CreateEmitter(this, nameof(this.Left)); + this.Right = pipeline.CreateEmitter(this, nameof(this.Right)); + this.Visible = pipeline.CreateReceiver(this, v => this.visible = v, nameof(this.Visible)); + this.Solid = pipeline.CreateReceiver(this, s => this.solid = s, nameof(this.Solid)); + this.Material = pipeline.CreateReceiver(this, m => this.material = m, nameof(this.Material)); + } + + /// + /// Gets the stream of left hand information. + /// + public Emitter Left { get; } + + /// + /// Gets the stream of left hand information. + /// + public Emitter Right { get; } + + /// + /// Gets the receiver of a value indicating whether hands should be rendered. + /// + public Receiver Visible { get; } + + /// + /// Gets the receiver of a value indicating whether hands participate in physics. + /// + public Receiver Solid { get; } + + /// + /// Gets the receiver of the material used to render the hands. + /// + public Receiver Material { get; } + + /// + public void Start(Action notifyCompletionTime) + { + this.active = true; + notifyCompletionTime(DateTime.MaxValue); + } + + /// + public void Stop(DateTime finalOriginatingTime, Action notifyCompleted) + { + this.active = false; + notifyCompleted(); + } + + /// + public override void Step() + { + var currentTime = this.pipeline.GetCurrentTime(); + if (this.active && currentTime - this.Left.LastEnvelope.OriginatingTime >= this.interval) + { + var left = Input.Hand(Handed.Left); + var right = Input.Hand(Handed.Right); + var originatingTime = this.pipeline.GetCurrentTime(); + + left.Visible = this.visible; + left.Solid = this.solid; + left.Material = this.material; + this.Left.Post(Hand.FromStereoKitHand(left), originatingTime); + + right.Visible = this.visible; + right.Solid = this.solid; + right.Material = this.material; + this.Right.Post(Hand.FromStereoKitHand(right), originatingTime); + } + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/HeadSensor.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/HeadSensor.cs new file mode 100644 index 000000000..68f88fb57 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/HeadSensor.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi.Components; + using StereoKit; + + /// + /// Source component that produces a stream of head coordinates. + /// + public class HeadSensor : StereoKitComponent, IProducer, ISourceComponent + { + private readonly Pipeline pipeline; + private readonly TimeSpan interval; + + private bool active; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Optional interval at which to poll head information (default 1/60th second). + public HeadSensor(Pipeline pipeline, TimeSpan interval = default) + : base(pipeline) + { + this.pipeline = pipeline; + this.interval = interval == default ? TimeSpan.Zero : interval; + this.Out = pipeline.CreateEmitter(this, nameof(this.Out)); + } + + /// + /// Gets the stream of tracked head pose. 
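// Usage sketch (illustrative, not part of this change): polling both hands at ~30 Hz without
// rendering them, and keeping only the frames in which the left hand is actually tracked.
// The pipeline and store are assumed to exist.
var handsSensor = new HandsSensor(pipeline, TimeSpan.FromSeconds(1.0 / 30), visible: false, solid: false);
var trackedLeftHand = handsSensor.Left.Where(hand => hand.IsTracked);
trackedLeftHand.Write("Hands.Left", store);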
+ /// + public Emitter Out { get; private set; } + + /// + public override void Step() + { + var currentTime = this.pipeline.GetCurrentTime(); + if (this.active && currentTime - this.Out.LastEnvelope.OriginatingTime >= this.interval) + { + var head = Input.Head; + var originatingTime = this.pipeline.GetCurrentTime(); + this.Out.Post(head.ToPsiCoordinateSystem(), originatingTime); + } + } + + /// + public void Start(Action notifyCompletionTime) + { + this.active = true; + notifyCompletionTime(DateTime.MaxValue); + } + + /// + public void Stop(DateTime finalOriginatingTime, Action notifyCompleted) + { + this.active = false; + notifyCompleted(); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/Microphone.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/Microphone.cs new file mode 100644 index 000000000..fc7c6c8f5 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/Microphone.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.Threading; + using Microsoft.Psi; + using Microsoft.Psi.Audio; + using Microsoft.Psi.Components; + using SKMicrophone = StereoKit.Microphone; + + /// + /// Component that captures audio from the microphone. + /// + /// Currently only supports 1-channel WAVE_FORMAT_IEEE_FLOAT at 48kHz. + public class Microphone : StereoKitComponent, IProducer, ISourceComponent + { + private readonly Pipeline pipeline; + private readonly MicrophoneConfiguration configuration; + private readonly WaveFormat audioFormat; + private readonly ManualResetEvent isStopping = new ManualResetEvent(false); + private float[] buffer; + private bool active; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// The configuration for the component. + public Microphone(Pipeline pipeline, MicrophoneConfiguration configuration = null) + : base(pipeline) + { + this.pipeline = pipeline; + this.configuration = configuration ?? 
new MicrophoneConfiguration(); + this.audioFormat = this.configuration.AudioFormat; + this.buffer = new float[(uint)(this.audioFormat.SamplesPerSec * this.configuration.SamplingInterval.TotalSeconds)]; + this.Out = pipeline.CreateEmitter(this, nameof(this.Out)); + } + + /// + public Emitter Out { get; } + + /// + public override bool Initialize() => SKMicrophone.Start(); + + /// + public override void Shutdown() => SKMicrophone.Stop(); + + /// + public void Start(Action notifyCompletionTime) + { + this.active = true; + notifyCompletionTime(DateTime.MaxValue); + } + + /// + public void Stop(DateTime finalOriginatingTime, Action notifyCompleted) + { + this.active = false; + notifyCompleted(); + } + + /// + public unsafe override void Step() + { + int unreadSamples = SKMicrophone.Sound.UnreadSamples; + var originatingTime = this.pipeline.GetCurrentTime(); + + // Check if there are samples to capture + if (this.active && unreadSamples > 0) + { + // Ensure that the sample buffer is large enough + if (unreadSamples > this.buffer.Length) + { + this.buffer = new float[unreadSamples]; + } + + // Read the audio samples + int samples = SKMicrophone.Sound.ReadSamples(ref this.buffer); + + // Convert to bytes and post the AudioBuffer + byte[] audio = new byte[samples * this.audioFormat.BitsPerSample / 8]; + fixed (void* src = this.buffer, dst = audio) + { + Buffer.MemoryCopy(src, dst, audio.Length, audio.Length); + } + + this.Out.Post(new AudioBuffer(audio, this.audioFormat), originatingTime); + } + } + } +} \ No newline at end of file diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/MicrophoneConfiguration.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/MicrophoneConfiguration.cs new file mode 100644 index 000000000..a6df41d64 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/MicrophoneConfiguration.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using Microsoft.Psi.Audio; + + /// + /// The configuration for the component. + /// + public class MicrophoneConfiguration + { + /// + /// Gets or sets the audio sampling interval. + /// + public TimeSpan SamplingInterval { get; set; } = TimeSpan.FromMilliseconds(50); + + /// + /// Gets the audio format. + /// + /// Currently only supports 1-channel WAVE_FORMAT_IEEE_FLOAT at 48kHz. + public WaveFormat AudioFormat { get; } = WaveFormat.CreateIeeeFloat(48000, 1); + } +} \ No newline at end of file diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/Microsoft.Psi.MixedReality.csproj b/Sources/MixedReality/Microsoft.Psi.MixedReality/Microsoft.Psi.MixedReality.csproj new file mode 100644 index 000000000..047612e87 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/Microsoft.Psi.MixedReality.csproj @@ -0,0 +1,43 @@ + + + + Provides data structures and components for processing inputs and rendering in mixed-reality using StereoKit. 
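// Usage sketch (illustrative): capturing microphone audio in 20 ms buffers and persisting it.
// The pipeline and store are assumed; the audio format is fixed at 1-channel float32, 48 kHz.
var microphone = new Microphone(pipeline, new MicrophoneConfiguration { SamplingInterval = TimeSpan.FromMilliseconds(20) });
microphone.Out.Write("Audio", store);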
+ netstandard2.0 + ../../../Build/Microsoft.Psi.ruleset + true + + + + true + + bin\Debug\netstandard2.0\Microsoft.Psi.MixedReality.xml + true + + + + true + + bin\Release\netstandard2.0\Microsoft.Psi.MixedReality.xml + true + + + + + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + + + diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/Operators.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/Operators.cs new file mode 100644 index 000000000..a47ad92a2 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/Operators.cs @@ -0,0 +1,271 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.Numerics; + using MathNet.Numerics.LinearAlgebra.Double; + using MathNet.Spatial.Euclidean; + using StereoKit; + using StereoKitColor = StereoKit.Color; + using SystemDrawingColor = System.Drawing.Color; + + /// + /// Implements operators. + /// + public static partial class Operators + { + private static readonly CoordinateSystem HoloLensBasis = new (default, UnitVector3D.ZAxis.Negate(), UnitVector3D.XAxis.Negate(), UnitVector3D.YAxis); + private static readonly CoordinateSystem HoloLensBasisInverted = HoloLensBasis.Invert(); + + /// + /// Compute a change of basis for the given matrix. From HoloLens basis to \psi basis. + /// + /// The given matrix in HoloLens basis. + /// The converted matrix with \psi basis. + /// /// + /// The HoloLens basis assumes that Forward=-Z, Left=-X, and Up=Y. + /// The \psi basis assumes that Forward=X, Left=Y, and Up=Z. + /// + public static DenseMatrix ChangeBasisHoloLensToPsi(this DenseMatrix holoLensMatrix) + { + return HoloLensBasisInverted * holoLensMatrix * HoloLensBasis; + } + + /// + /// Compute a change of basis for the given matrix. From \psi basis to HoloLens basis. + /// + /// The given matrix in \psi basis. + /// The converted matrix with HoloLens basis. + /// /// + /// The HoloLens basis assumes that Forward=-Z, Left=-X, and Up=Y. + /// The \psi basis assumes that Forward=X, Left=Y, and Up=Z. + /// + public static DenseMatrix ChangeBasisPsiToHoloLens(this DenseMatrix psiMatrix) + { + return HoloLensBasis * psiMatrix * HoloLensBasisInverted; + } + + /// + /// Converts a pose to a \psi , + /// changing basis from HoloLens to \psi and transforming from StereoKit coordinates to world coordinates. + /// + /// The to be converted. + /// The . + /// + /// The HoloLens basis assumes that Forward=-Z, Left=-X, and Up=Y. + /// The \psi basis assumes that Forward=X, Left=Y, and Up=Z. + /// "StereoKit coordinates" means "in relation to the pose of the headset at startup". + /// "World coordinates" means "in relation to the world spatial anchor". + /// + public static CoordinateSystem ToPsiCoordinateSystem(this StereoKit.Matrix stereoKitMatrix) + { + Matrix4x4 systemMatrix = stereoKitMatrix; + var mathNetMatrix = systemMatrix.ToMathNetMatrix().ChangeBasisHoloLensToPsi(); + var coordinateSystem = new CoordinateSystem(mathNetMatrix); + return coordinateSystem.TransformBy(StereoKitTransforms.StereoKitStartingPose); + } + + /// + /// Converts a StereoKit to a \psi , + /// changing basis from HoloLens to \psi and transforming from StereoKit coordinates to world coordinates. + /// + /// The StereoKit to be converted. + /// The . + /// + /// The HoloLens basis assumes that Forward=-Z, Left=-X, and Up=Y. + /// The \psi basis assumes that Forward=X, Left=Y, and Up=Z. 
+ /// "StereoKit coordinates" means "in relation to the pose of the headset at startup". + /// "World coordinates" means "in relation to the world spatial anchor". + /// + public static CoordinateSystem ToPsiCoordinateSystem(this Pose pose) + { + return pose.ToMatrix().ToPsiCoordinateSystem(); + } + + /// + /// Converts a pose to a pose, + /// changing basis from \psi to HoloLens and transforming from world coordinates to StereoKit coordinates. + /// + /// The pose to be converted. + /// The . + /// + /// The HoloLens basis assumes that Forward=-Z, Left=-X, and Up=Y. + /// The \psi basis assumes that Forward=X, Left=Y, and Up=Z. + /// "StereoKit coordinates" means "in relation to the pose of the headset at startup". + /// "World coordinates" means "in relation to the world spatial anchor". + /// + public static StereoKit.Matrix ToStereoKitMatrix(this CoordinateSystem coordinateSystem) + { + var mathNetMatrix = coordinateSystem.TransformBy(StereoKitTransforms.StereoKitStartingPoseInverse).ChangeBasisPsiToHoloLens(); + return new StereoKit.Matrix(mathNetMatrix.ToSystemNumericsMatrix()); + } + + /// + /// Converts a pose to a StereoKit , + /// changing basis from \psi to HoloLens and transforming from world coordinates to StereoKit coordinates. + /// + /// The pose to be converted. + /// The . + /// + /// The HoloLens basis assumes that Forward=-Z, Left=-X, and Up=Y. + /// The \psi basis assumes that Forward=X, Left=Y, and Up=Z. + /// "StereoKit coordinates" means "in relation to the pose of the headset at startup". + /// "World coordinates" means "in relation to the world spatial anchor". + /// + public static Pose ToStereoKitPose(this CoordinateSystem coordinateSystem) + { + return coordinateSystem.ToStereoKitMatrix().Pose; + } + + /// + /// Converts a to a . + /// + /// The System.Numerics matrix. + /// The MathNet dense matrix. + public static DenseMatrix ToMathNetMatrix(this Matrix4x4 systemNumericsMatrix) + { + // Values are stored column-major. + var values = new double[] + { + systemNumericsMatrix.M11, + systemNumericsMatrix.M12, + systemNumericsMatrix.M13, + systemNumericsMatrix.M14, + systemNumericsMatrix.M21, + systemNumericsMatrix.M22, + systemNumericsMatrix.M23, + systemNumericsMatrix.M24, + systemNumericsMatrix.M31, + systemNumericsMatrix.M32, + systemNumericsMatrix.M33, + systemNumericsMatrix.M34, + systemNumericsMatrix.M41, + systemNumericsMatrix.M42, + systemNumericsMatrix.M43, + systemNumericsMatrix.M44, + }; + + return new DenseMatrix(4, 4, values); + } + + /// + /// Converts a to a . + /// + /// The MathNet dense matrix. + /// The System.Numerics matrix. + public static Matrix4x4 ToSystemNumericsMatrix(this DenseMatrix mathNetMatrix) + { + var values = mathNetMatrix.Values; + return new Matrix4x4( + (float)values[0], + (float)values[1], + (float)values[2], + (float)values[3], + (float)values[4], + (float)values[5], + (float)values[6], + (float)values[7], + (float)values[8], + (float)values[9], + (float)values[10], + (float)values[11], + (float)values[12], + (float)values[13], + (float)values[14], + (float)values[15]); + } + + /// + /// Convert to , changing the basis from \psi to HoloLens. + /// + /// to be converted. + /// If true, transform from world coordinates to StereoKit coordinates. + /// . + /// + /// The HoloLens basis assumes that Forward=-Z, Left=-X, and Up=Y. + /// The \psi basis assumes that Forward=X, Left=Y, and Up=Z. + /// "StereoKit coordinates" means "in relation to the pose of the headset at startup". 
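// Illustrative round trip between the two bases (not part of this file): a StereoKit pose is
// mapped into the \psi world frame and back, which should recover the original pose up to
// floating-point error.
Pose headPose = Input.Head;
CoordinateSystem psiHeadPose = headPose.ToPsiCoordinateSystem();
Pose roundTripped = psiHeadPose.ToStereoKitPose();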
+ /// "World coordinates" means "in relation to the world spatial anchor". + /// + public static Vec3 ToVec3(this Point3D point3d, bool transformWorldToStereoKit = true) + { + if (transformWorldToStereoKit) + { + point3d = StereoKitTransforms.StereoKitStartingPoseInverse.Transform(point3d); + } + + // Change of basis happening here: + return new Vec3(-(float)point3d.Y, (float)point3d.Z, -(float)point3d.X); + } + + /// + /// Convert to , changing the basis from HoloLens to \psi. + /// + /// to be converted. + /// If true, transform from StereoKit coordinates to world coordinates. + /// . + /// + /// The HoloLens basis assumes that Forward=-Z, Left=-X, and Up=Y. + /// The \psi basis assumes that Forward=X, Left=Y, and Up=Z. + /// "StereoKit coordinates" means "in relation to the pose of the headset at startup". + /// "World coordinates" means "in relation to the world spatial anchor". + /// + public static Point3D ToPoint3D(this Vec3 vec3, bool transformStereoKitToWorld = true) + { + var point3D = new Point3D(-vec3.z, -vec3.x, vec3.y); + + if (transformStereoKitToWorld) + { + return StereoKitTransforms.StereoKitStartingPose.Transform(point3D); + } + else + { + return point3D; + } + } + + /// + /// Convert to , changing the basis from HoloLens to \psi. + /// + /// to be converted. + /// If true, transform from StereoKit coordinates to world coordinates. + /// . + /// + /// The HoloLens basis assumes that Forward=-Z, Left=-X, and Up=Y. + /// The \psi basis assumes that Forward=X, Left=Y, and Up=Z. + /// "StereoKit coordinates" means "in relation to the pose of the headset at startup". + /// "World coordinates" means "in relation to the world spatial anchor". + /// + public static Point3D ToPoint3D(this Vector3 vector3, bool transformStereoKitToWorld = true) + { + Vec3 v = vector3; + return v.ToPoint3D(transformStereoKitToWorld); + } + + /// + /// Converts a specified to a . + /// + /// The . + /// The corresponding . + public static StereoKitColor ToStereoKitColor(this SystemDrawingColor color) + => new ((float)color.R / 255, (float)color.G / 255, (float)color.B / 255, (float)color.A / 255); + + /// + /// Convert stream of frames of IMU samples to flattened stream of samples within. + /// + /// Stream of IMU frames. + /// Stream of IMU samples. + public static IProducer SelectManyImuSamples(this IProducer<(Vector3D Sample, DateTime OriginatingTime)[]> source) + { + return source.Process<(Vector3D Sample, DateTime OriginatingTime)[], Vector3D>((samples, envelope, emitter) => + { + foreach (var sample in samples) + { + emitter.Post(sample.Sample, sample.OriginatingTime); + } + }); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/Mesh3DListStereoKitRenderer.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/Mesh3DListStereoKitRenderer.cs new file mode 100644 index 000000000..d6f4ad4f3 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/Mesh3DListStereoKitRenderer.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System.Collections.Generic; + using System.Linq; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi.Spatial.Euclidean; + using StereoKit; + + /// + /// Component that visually renders a list of meshes. + /// + public class Mesh3DListStereoKitRenderer : ModelBasedStereoKitRenderer, IConsumer> + { + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. 
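// Illustrative use of SelectManyImuSamples (not part of this change): given any producer of framed
// IMU samples (for example, the gyroscope or accelerometer components), each frame of
// (sample, originating time) pairs is flattened into a stream of individual Vector3D samples.
static IProducer<Vector3D> FlattenImuFrames(IProducer<(Vector3D Sample, DateTime OriginatingTime)[]> imuFrames)
    => imuFrames.SelectManyImuSamples();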
+ /// Geometry pose. + /// Geometry scale. + /// Material color. + /// Visibility. + /// Whether to render as model as wireframe only. + public Mesh3DListStereoKitRenderer(Pipeline pipeline, CoordinateSystem pose, Vector3D scale, System.Drawing.Color color, bool visible = true, bool wireframe = false) + : base(pipeline, pose, scale, color, visible, wireframe) + { + this.In = pipeline.CreateReceiver>(this, this.UpdateMeshes, nameof(this.In)); + } + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Visibility. + /// Whether to render as model as wireframe only. + public Mesh3DListStereoKitRenderer(Pipeline pipeline, bool visible = true, bool wireframe = true) + : this(pipeline, new CoordinateSystem(), new Vector3D(1, 1, 1), System.Drawing.Color.White, visible, wireframe) + { + } + + /// + /// Gets the receiver for meshes. + /// + public Receiver> In { get; private set; } + + /// + public override bool Initialize() + { + base.Initialize(); + this.Material.FaceCull = Cull.None; + return true; + } + + private void UpdateMeshes(List meshes) + { + static Mesh ToStereoKitMesh(Mesh3D mesh3d) + { + var verts = mesh3d.Vertices.Select(v => new Vertex(new Vec3((float)-v.Y, (float)v.Z, (float)-v.X), Vec3.One)).ToArray(); // TODO: surface normal? + var mesh = new Mesh(); + mesh.SetInds(mesh3d.TriangleIndices); + mesh.SetVerts(verts); + return mesh; + } + + var model = new Model(); + foreach (var mesh in meshes) + { + model.AddNode(null, Matrix.Identity, ToStereoKitMesh(mesh), this.Material); + } + + this.Model = model; + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/MeshStereoKitRenderer.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/MeshStereoKitRenderer.cs new file mode 100644 index 000000000..1e3cd394a --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/MeshStereoKitRenderer.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System.IO; + using System.Reflection; + using MathNet.Spatial.Euclidean; + using StereoKit; + + /// + /// Component that visually renders a mesh. + /// + public class MeshStereoKitRenderer : ModelBasedStereoKitRenderer + { + private readonly Mesh mesh; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Geometry mesh. + /// Geometry pose. + /// Geometry scale. + /// Material color. + /// Visibility. + /// Whether to render as model as wireframe only. + public MeshStereoKitRenderer(Pipeline pipeline, Mesh mesh, CoordinateSystem pose, Vector3D scale, System.Drawing.Color color, bool visible = true, bool wireframe = false) + : base(pipeline, pose, scale, color, visible, wireframe) + { + this.mesh = mesh; + } + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Geometry mesh. + public MeshStereoKitRenderer(Pipeline pipeline, Mesh mesh) + : this(pipeline, mesh, new CoordinateSystem(), new Vector3D(1, 1, 1), System.Drawing.Color.White) + { + } + + /// + /// Get a mesh from an embedded resource asset. + /// + /// Name of resource. + /// StereoKit Mesh. 
+ public static Mesh CreateMeshFromEmbeddedResource(string name) + { + using var stream = Assembly.GetCallingAssembly().GetManifestResourceStream(name); + using var mem = new MemoryStream(); + stream.CopyTo(mem); + return Model.FromMemory(name, mem.ToArray()).Visuals[0].Mesh; + } + + /// + public override bool Initialize() + { + base.Initialize(); + this.Model = Model.FromMesh(this.mesh, this.Material); + return true; + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/ModelBasedStereoKitRenderer.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/ModelBasedStereoKitRenderer.cs new file mode 100644 index 000000000..10377e658 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/ModelBasedStereoKitRenderer.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using MathNet.Spatial.Euclidean; + using StereoKit; + + /// + /// Base class for StereoKit model-based rendering components. + /// + public abstract class ModelBasedStereoKitRenderer : StereoKitComponent + { + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Geometry pose. + /// Geometry scale. + /// Material color. + /// Visibility. + /// Whether to render as model as wireframe only. + public ModelBasedStereoKitRenderer(Pipeline pipeline, CoordinateSystem pose, Vector3D scale, System.Drawing.Color color, bool visible = true, bool wireframe = false) + : base(pipeline) + { + this.Color = color; + this.Visible = visible; + this.Wireframe = wireframe; + + // Convert pose and scale to StereoKit basis. + this.PoseMatrix = pose.ToStereoKitMatrix(); + this.Scale = new Vec3((float)scale.Y, (float)scale.Z, (float)scale.X); + + this.ColorInput = pipeline.CreateReceiver(this, this.ReceiveColor, nameof(this.ColorInput)); + this.PoseInput = pipeline.CreateReceiver(this, this.ReceivePose, nameof(this.PoseInput)); + this.ScaleInput = pipeline.CreateReceiver(this, this.ReceiveScale, nameof(this.ScaleInput)); + this.VisibleInput = pipeline.CreateReceiver(this, this.ReceiveVisible, nameof(this.VisibleInput)); + this.WireframeInput = pipeline.CreateReceiver(this, this.ReceiveWireframe, nameof(this.WireframeInput)); + } + + /// + /// Gets receiver for material color. + /// + public Receiver ColorInput { get; private set; } + + /// + /// Gets receiver for geometry pose (in \psi basis). + /// + public Receiver PoseInput { get; private set; } + + /// + /// Gets receiver for geometry scale (in \psi basis). + /// + public Receiver ScaleInput { get; private set; } + + /// + /// Gets receiver for visibility. + /// + public Receiver VisibleInput { get; private set; } + + /// + /// Gets receiver for wireframe indicator. + /// + public Receiver WireframeInput { get; private set; } + + /// + /// Gets or sets material. + /// + protected Material Material { get; set; } + + /// + /// Gets or sets geometry model. + /// + protected Model Model { get; set; } + + /// + /// Gets or sets the model transform. + /// + protected Matrix ModelTransform { get; set; } + + /// + /// Gets or sets the color. + /// + protected System.Drawing.Color Color { get; set; } + + /// + /// Gets or sets the pose as a Matrix (in StereoKit basis). + /// + protected Matrix PoseMatrix { get; set; } + + /// + /// Gets or sets the scale (in StereoKit basis). 
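// Usage sketch (the resource name is hypothetical): load a mesh embedded in the app assembly and
// render it at the identity pose with unit scale.
var markerMesh = MeshStereoKitRenderer.CreateMeshFromEmbeddedResource("MyApp.Assets.Marker.glb");
var markerPose = new CoordinateSystem(); // identity pose, for illustration
var markerRenderer = new MeshStereoKitRenderer(
    pipeline, markerMesh, markerPose, new Vector3D(1, 1, 1), System.Drawing.Color.Red);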
+ /// + protected Vec3 Scale { get; set; } + + /// + /// Gets or sets a value indicating whether the renderer is visible. + /// + protected bool Visible { get; set; } + + /// + /// Gets or sets a value indicating whether to render the model as wireframe-only. + /// + protected bool Wireframe { get; set; } + + /// + public override bool Initialize() + { + this.UpdateMaterial(); + this.UpdateModelTransform(); + + return true; + } + + /// + public override void Step() + { + if (this.Visible) + { + this.Model?.Draw(this.ModelTransform); + } + } + + /// + /// Updates the material based on the other properties. + /// + protected virtual void UpdateMaterial() + { + this.Material ??= Default.Material.Copy(); + this.Material[MatParamName.ColorTint] = this.Color.ToStereoKitColor(); + this.Material.Wireframe = this.Wireframe; + + if (this.Model != null) + { + this.Model.Visuals[0].Material = this.Material; + } + } + + /// + /// Updates the model transform. + /// + protected virtual void UpdateModelTransform() + { + this.ModelTransform = Matrix.S(this.Scale) * this.PoseMatrix; + } + + private void ReceiveColor(System.Drawing.Color color) + { + this.Color = color; + this.UpdateMaterial(); + } + + private void ReceivePose(CoordinateSystem pose) + { + this.PoseMatrix = pose.ToStereoKitMatrix(); + this.UpdateModelTransform(); + } + + private void ReceiveScale(Vector3D scale) + { + this.Scale = new Vec3((float)scale.Y, (float)scale.Z, (float)scale.X); + this.UpdateModelTransform(); + } + + private void ReceiveVisible(bool visible) + { + this.Visible = visible; + } + + private void ReceiveWireframe(bool wireframe) + { + this.Wireframe = wireframe; + this.UpdateMaterial(); + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/Rectangle3DListStereoKitRenderer.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/Rectangle3DListStereoKitRenderer.cs new file mode 100644 index 000000000..b9bdaa2a1 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/Renderers/Rectangle3DListStereoKitRenderer.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System.Collections.Generic; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi.Spatial.Euclidean; + using StereoKit; + + /// + /// Component that visually renders a list of 3D rectangles. + /// + public class Rectangle3DListStereoKitRenderer : ModelBasedStereoKitRenderer, IConsumer> + { + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Geometry pose. + /// Material color. + /// Visibility. + /// Whether to render as model as wireframe only. + public Rectangle3DListStereoKitRenderer(Pipeline pipeline, CoordinateSystem pose, System.Drawing.Color color, bool visible = true, bool wireframe = false) + : base(pipeline, pose, new Vector3D(1, 1, 1), color, visible, wireframe) + { + this.In = pipeline.CreateReceiver>(this, this.UpdateRectangles, nameof(this.In)); + } + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Initial visibility. + public Rectangle3DListStereoKitRenderer(Pipeline pipeline, bool visible = true) + : this(pipeline, new CoordinateSystem(), System.Drawing.Color.White, visible) + { + } + + /// + /// Gets the receiver for rectangles. 
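// Illustrative wiring (not part of this change): any component derived from ModelBasedStereoKitRenderer
// (such as the markerRenderer in the sketch above) can be driven from \psi streams. The poseStream and
// visibleStream producers here are assumed.
poseStream.PipeTo(markerRenderer.PoseInput);
visibleStream.PipeTo(markerRenderer.VisibleInput);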
+ /// + public Receiver> In { get; private set; } + + /// + public override bool Initialize() + { + base.Initialize(); + this.Material.FaceCull = Cull.None; + return true; + } + + private static Vertex[] ConvertToStereoKitVertices(Rectangle3D rect) + { + // Convert rectangle points and normal into StereoKit vertices. We only need to change basis, + // and do not need to change from world to StereoKit coordinates, because that is already done + // by the parent Model that these mesh vertices will be attached to. + var normal = (rect.BottomRight - rect.BottomLeft).CrossProduct(rect.TopLeft - rect.BottomLeft).Normalize(); + var stereoKitNormal = normal.ToPoint3D().ToVec3(false); + + return new Vertex[] + { + new Vertex(rect.TopLeft.ToVec3(false), stereoKitNormal), + new Vertex(rect.TopRight.ToVec3(false), stereoKitNormal), + new Vertex(rect.BottomLeft.ToVec3(false), stereoKitNormal), + new Vertex(rect.BottomRight.ToVec3(false), stereoKitNormal), + }; + } + + private void UpdateRectangles(List rectangles) + { + static Mesh ToQuadMesh(Rectangle3D rect) + { + var mesh = new Mesh(); + mesh.SetVerts(ConvertToStereoKitVertices(rect)); + mesh.SetInds(new uint[] { 3, 1, 0, 2, 3, 0 }); // two triangles from corner vertices + return mesh; + } + + var model = new Model(); + foreach (var rect in rectangles) + { + model.AddNode(null, Matrix.Identity, ToQuadMesh(rect), this.Material); + } + + this.Model = model; + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/SceneObjectCollection.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/SceneObjectCollection.cs new file mode 100644 index 000000000..5c2df102e --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/SceneObjectCollection.cs @@ -0,0 +1,139 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System.Collections.Generic; + using Microsoft.Psi.Spatial.Euclidean; + + /// + /// Represents a scene understanding object collection. + /// + public class SceneObjectCollection + { + /// + /// Initializes a new instance of the class. + /// + /// Background object. + /// Ceiling object. + /// Floor object. + /// Inferred object. + /// Platform object. + /// Unknown object. + /// Wall object. + /// World object. + public SceneObjectCollection( + SceneObject background, + SceneObject ceiling, + SceneObject floor, + SceneObject inferred, + SceneObject platform, + SceneObject unknown, + SceneObject wall, + SceneObject world) + { + this.Background = background; + this.Ceiling = ceiling; + this.Floor = floor; + this.Inferred = inferred; + this.Platform = platform; + this.Unknown = unknown; + this.Wall = wall; + this.World = world; + } + + /// + /// Initializes a new instance of the class. + /// + public SceneObjectCollection() + : this(SceneObject.Empty, SceneObject.Empty, SceneObject.Empty, SceneObject.Empty, SceneObject.Empty, SceneObject.Empty, SceneObject.Empty, SceneObject.Empty) + { + } + + /// + /// Gets or sets the background scene object. + /// + public SceneObject Background { get; set; } + + /// + /// Gets or sets the background scene object. + /// + public SceneObject Ceiling { get; set; } + + /// + /// Gets or sets the background scene object. + /// + public SceneObject Floor { get; set; } + + /// + /// Gets or sets the background scene object. + /// + public SceneObject Inferred { get; set; } + + /// + /// Gets or sets the background scene object. 
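// Usage sketch (the wallRectangles stream of List<Rectangle3D> is assumed): visualize a stream of
// 3D rectangles, for example walls produced by scene understanding, as quads.
var wallRenderer = new Rectangle3DListStereoKitRenderer(pipeline, visible: true);
wallRectangles.PipeTo(wallRenderer.In);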
+ /// + public SceneObject Platform { get; set; } + + /// + /// Gets or sets the background scene object. + /// + public SceneObject Unknown { get; set; } + + /// + /// Gets or sets the background scene object. + /// + public SceneObject Wall { get; set; } + + /// + /// Gets or sets the background scene object. + /// + public SceneObject World { get; set; } + + /// + /// Represents a scene object. + /// + public class SceneObject + { + /// + /// Initializes a new instance of the class. + /// + /// Meshes. + /// Collider meshes. + /// Rectangles. + /// Centermost placement rectangles. + public SceneObject(List meshes, List colliderMeshes, List rectangles, List placementRectangles) + { + this.Meshes = meshes; + this.ColliderMeshes = colliderMeshes; + this.Rectangles = rectangles; + this.PlacementRectangles = placementRectangles; + } + + /// + /// Gets empty singleton instance. + /// + public static SceneObject Empty { get; } = new SceneObject(new List(), new List(), new List(), new List()); + + /// + /// Gets the meshes. + /// + public List Meshes { get; private set; } + + /// + /// Gets the collider meshes. + /// + public List ColliderMeshes { get; private set; } + + /// + /// Gets the quad rectangles. + /// + public List Rectangles { get; private set; } + + /// + /// Gets the centermost placement rectangles. + /// + public List PlacementRectangles { get; private set; } + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/SpatialSound.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/SpatialSound.cs new file mode 100644 index 000000000..dba94dcce --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/SpatialSound.cs @@ -0,0 +1,115 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using System.IO; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi.Audio; + using StereoKit; + + /// + /// Component that implements a spatial sound renderer. + /// + public class SpatialSound : StereoKitComponent, IConsumer + { + private Sound sound; + private SoundInst soundInst; + private Vec3 position; + private float volume; + private bool playing = false; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Initial position of spatial sound. + /// Intial audio volume (0-1, default 1). + public SpatialSound(Pipeline pipeline, Point3D initialPosition, double initialVolume = 1) + : base(pipeline) + { + this.position = new CoordinateSystem(initialPosition, UnitVector3D.XAxis, UnitVector3D.YAxis, UnitVector3D.ZAxis).ToStereoKitMatrix().Translation; + this.volume = (float)initialVolume; + this.In = pipeline.CreateReceiver(this, this.UpdateAudio, nameof(this.In)); + this.PositionInput = pipeline.CreateReceiver(this, this.UpdatePosition, nameof(this.PositionInput)); + this.VolumeInput = pipeline.CreateReceiver(this, this.UpdateVolume, nameof(this.VolumeInput)); + } + + /// + /// Gets the receiver of audio. + /// + public Receiver In { get; private set; } + + /// + /// Gets receiver for spatial pose. + /// + public Receiver PositionInput { get; private set; } + + /// + /// Gets receiver for audio volume. 
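// Illustrative consumption of a SceneObjectCollection stream (the sceneObjects producer is assumed,
// for example coming from the scene understanding component): extract the centermost placement
// rectangles of the walls.
var wallPlacements = sceneObjects.Select(scene => scene.Wall.PlacementRectangles);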
+ /// + public Receiver VolumeInput { get; private set; } + + /// + public override bool Initialize() + { + this.sound = Sound.CreateStream(2f); + return true; + } + + private void UpdateAudio(AudioBuffer audio) + { + var format = audio.Format; + if (format.Channels != 1 || + format.SamplesPerSec != 48000 || + (format.FormatTag != WaveFormatTag.WAVE_FORMAT_IEEE_FLOAT && + format.FormatTag != WaveFormatTag.WAVE_FORMAT_EXTENSIBLE) || + format.BitsPerSample != 32) + { + throw new ArgumentException("Expected 1-channel, 48kHz, float32 audio format."); + } + + using var stream = new MemoryStream(audio.Data, 0, audio.Length); + using var reader = new BinaryReader(stream); + var count = audio.Length / 4; + var samples = new float[count]; + for (var i = 0; i < count; i++) + { + samples[i] = reader.ReadSingle(); + } + + this.sound.WriteSamples(samples); + if (!this.playing) + { + this.soundInst = this.sound.Play(this.position, this.volume); + this.playing = true; + } + } + + private void UpdatePosition(Point3D position) + { + var p = new CoordinateSystem(position, UnitVector3D.XAxis, UnitVector3D.YAxis, UnitVector3D.ZAxis).ToStereoKitMatrix().Translation; + if (this.playing) + { + this.soundInst.Position = p; + } + else + { + this.position = p; + } + } + + private void UpdateVolume(double volume) + { + if (this.playing) + { + this.soundInst.Volume = (float)volume; + } + else + { + this.volume = (float)volume; + } + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/StereoKitComponent.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/StereoKitComponent.cs new file mode 100644 index 000000000..592fcca4f --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/StereoKitComponent.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.MixedReality +{ + using System; + using StereoKit; + using StereoKit.Framework; + + /// + /// Base abstract class for implementing StereoKit \psi components. + /// + public abstract class StereoKitComponent : IStepper + { + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + public StereoKitComponent(Pipeline pipeline) + { + // Defer call to SK.AddStepper(this) to PipelineRun to ensure derived classes have finished construction! + // Otherwise IStepper.Initialize() could get called before this object is fully constructed. + pipeline.PipelineRun += (_, _) => + { + if (SK.AddStepper(this) == default) + { + throw new Exception($"Unable to add {this} as a Stepper to StereoKit."); + } + }; + + // Remove this stepper when pipeline is no longer running, otherwise Step() will continue to be called! + pipeline.PipelineCompleted += (_, _) => + { + SK.RemoveStepper(this); + }; + } + + /// + public bool Enabled => true; + + /// + public virtual bool Initialize() => true; + + /// + public virtual void Step() + { + } + + /// + public virtual void Shutdown() + { + } + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/StereoKitTransforms.cs b/Sources/MixedReality/Microsoft.Psi.MixedReality/StereoKitTransforms.cs new file mode 100644 index 000000000..ba0b6ab30 --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/StereoKitTransforms.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
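// Usage sketch (the audio stream is assumed and must be 1-channel, 48 kHz, float32): play the stream
// as a spatialized sound located one meter in front of the world origin, at half volume.
var spatialSound = new SpatialSound(pipeline, new Point3D(1, 0, 0), 0.5);
audio.PipeTo(spatialSound.In);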
+ +namespace Microsoft.Psi.MixedReality +{ + using MathNet.Spatial.Euclidean; + + /// + /// Static StereoKit transforms which are applied in/out of StereoKit from \psi. + /// + public static class StereoKitTransforms + { + /// + /// Gets or sets the starting pose of StereoKit (the headset) in the world (in \psi basis). + /// + public static CoordinateSystem StereoKitStartingPose { get; set; } = new CoordinateSystem(); + + /// + /// Gets or sets the inverse of the StereoKit starting pose in the world. + /// + public static CoordinateSystem StereoKitStartingPoseInverse { get; set; } = new CoordinateSystem(); + } +} diff --git a/Sources/MixedReality/Microsoft.Psi.MixedReality/stylecop.json b/Sources/MixedReality/Microsoft.Psi.MixedReality/stylecop.json new file mode 100644 index 000000000..6f09427eb --- /dev/null +++ b/Sources/MixedReality/Microsoft.Psi.MixedReality/stylecop.json @@ -0,0 +1,16 @@ +{ + // ACTION REQUIRED: This file was automatically added to your project, but it + // will not take effect until additional steps are taken to enable it. See the + // following page for additional information: + // + // https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/EnableConfiguration.md + + "$schema": "https://raw.githubusercontent.com/DotNetAnalyzers/StyleCopAnalyzers/master/StyleCop.Analyzers/StyleCop.Analyzers/Settings/stylecop.schema.json", + "settings": { + "documentationRules": { + "companyName": "Microsoft Corporation", + "copyrightText": "Copyright (c) Microsoft Corporation. All rights reserved.\nLicensed under the MIT license.", + "xmlHeader": false + } + } +} \ No newline at end of file diff --git a/Sources/RealSense/Microsoft.Psi.RealSense.Windows.x64/Properties/AssemblyInfo.cs b/Sources/RealSense/Microsoft.Psi.RealSense.Windows.x64/Properties/AssemblyInfo.cs index 0764bf777..b5a90029e 100644 --- a/Sources/RealSense/Microsoft.Psi.RealSense.Windows.x64/Properties/AssemblyInfo.cs +++ b/Sources/RealSense/Microsoft.Psi.RealSense.Windows.x64/Properties/AssemblyInfo.cs @@ -35,6 +35,6 @@ // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("0.15.49.1")] -[assembly: AssemblyFileVersion("0.15.49.1")] -[assembly: AssemblyInformationalVersion("0.15.49.1-beta")] +[assembly: AssemblyVersion("0.16.92.1")] +[assembly: AssemblyFileVersion("0.16.92.1")] +[assembly: AssemblyInformationalVersion("0.16.92.1-beta")] diff --git a/Sources/RealSense/Microsoft.Psi.RealSense_Interop.Windows.x64/AssemblyInfo.cpp b/Sources/RealSense/Microsoft.Psi.RealSense_Interop.Windows.x64/AssemblyInfo.cpp index 2e27bd0be..d9d49e6ae 100644 --- a/Sources/RealSense/Microsoft.Psi.RealSense_Interop.Windows.x64/AssemblyInfo.cpp +++ b/Sources/RealSense/Microsoft.Psi.RealSense_Interop.Windows.x64/AssemblyInfo.cpp @@ -31,9 +31,9 @@ using namespace System::Security::Permissions; // You can specify all the value or you can default the Revision and Build Numbers // by using the '*' as shown below: -[assembly:AssemblyVersionAttribute("0.15.49.1")]; -[assembly:AssemblyFileVersionAttribute("0.15.49.1")]; -[assembly:AssemblyInformationalVersionAttribute("0.15.49.1-beta")]; +[assembly:AssemblyVersionAttribute("0.16.92.1")]; +[assembly:AssemblyFileVersionAttribute("0.16.92.1")]; +[assembly:AssemblyInformationalVersionAttribute("0.16.92.1-beta")]; [assembly:ComVisible(false)]; diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Format/MessagePackFormat.cs 
b/Sources/Runtime/Microsoft.Psi.Interop/Format/MessagePackFormat.cs index 9ce42fc63..93e82713f 100644 --- a/Sources/Runtime/Microsoft.Psi.Interop/Format/MessagePackFormat.cs +++ b/Sources/Runtime/Microsoft.Psi.Interop/Format/MessagePackFormat.cs @@ -22,7 +22,7 @@ private MessagePackFormat() /// /// Gets singleton instance. /// - public static MessagePackFormat Instance { get; } = new MessagePackFormat(); + public static MessagePackFormat Instance { get; } = new (); /// public (byte[], int, int) SerializeMessage(dynamic message, DateTime originatingTime) @@ -82,7 +82,7 @@ private dynamic NormalizeValue(dynamic value) var dict = expando as IDictionary; foreach (var kv in value as IDictionary) { - dict[kv.Key as string] = this.NormalizeValue(kv.Value); // potentially recursively + dict[kv.Key.ToString()] = this.NormalizeValue(kv.Value); // potentially recursively } return expando; diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Microsoft.Psi.Interop.csproj b/Sources/Runtime/Microsoft.Psi.Interop/Microsoft.Psi.Interop.csproj index c004c4760..b0695befe 100644 --- a/Sources/Runtime/Microsoft.Psi.Interop/Microsoft.Psi.Interop.csproj +++ b/Sources/Runtime/Microsoft.Psi.Interop/Microsoft.Psi.Interop.csproj @@ -9,11 +9,15 @@ bin\Debug\netstandard2.0\Microsoft.Psi.Interop.xml ../../../Build/Microsoft.Psi.ruleset + true + bin\Release\netstandard2.0\Microsoft.Psi.Interop.xml ../../../Build/Microsoft.Psi.ruleset + true + diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Readme.md b/Sources/Runtime/Microsoft.Psi.Interop/Readme.md index 595e1148f..3184ad17d 100644 --- a/Sources/Runtime/Microsoft.Psi.Interop/Readme.md +++ b/Sources/Runtime/Microsoft.Psi.Interop/Readme.md @@ -17,4 +17,8 @@ Streams in any of these formats may be persisted to or read from disk, or may be ## CLI Tool The [PsiStoreTool](../../Tools/PsiStoreTool/Readme.md) exposes the above facilities as a command-line tool. It may be used to explore available streams in a store, convert [to other formats (MessagePack, JSON, CSV)](../../Runtime/Microsoft.Psi.Interop/Format/Readme.md) -and [persist to disk or send over a message queue](../../Runtime/Microsoft.Psi.Interop/Transport/Readme.md) for consumption by other platforms and languages. \ No newline at end of file +and [persist to disk or send over a message queue](../../Runtime/Microsoft.Psi.Interop/Transport/Readme.md) for consumption by other platforms and languages. + +## Rendezvous + +The [rendezvous system](Rendezvous/Readme.md) maintains and relays information about \psi streams available on the network, allowing a distributed system to negotiate remoting connections. diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Operators.cs b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Operators.cs new file mode 100644 index 000000000..de19c8d42 --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Operators.cs @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Interop.Rendezvous +{ + using System; + using System.Linq; + using Microsoft.Psi.Interop.Serialization; + using Microsoft.Psi.Interop.Transport; + using Microsoft.Psi.Remoting; + + /// + /// Rendezvous related operators. + /// + public static class Operators + { + /// + /// Create a rendezvous endpoint from a . + /// + /// Type of data stream. + /// from which to create endpoint. + /// Address with which to create endpoint. + /// Rendezvous endpoint. 
+ public static Rendezvous.Endpoint ToRendezvousEndpoint(this TcpWriter writer, string address) + { + // Each TcpWriter is an endpoint emitting a single stream + return new Rendezvous.TcpSourceEndpoint(address, writer.Port, new[] { new Rendezvous.Stream(writer.Name, typeof(T)) }); + } + + /// + /// Create a from a . + /// + /// Type of data stream. + /// from which to create . + /// The pipeline to add the component to. + /// The deserializer to use to deserialize messages. + /// An optional parameter indicating whether to use originating times received from the source over the network or to re-timestamp with the current pipeline time upon receiving. + /// An optional name for the TCP source. + /// . + public static TcpSource ToTcpSource(this Rendezvous.TcpSourceEndpoint endpoint, Pipeline pipeline, IFormatDeserializer deserializer, bool useSourceOriginatingTimes = true, string name = null) + => new (pipeline, endpoint.Host, endpoint.Port, deserializer, useSourceOriginatingTimes, name); + + /// + /// Create a rendezvous endpoint from a . + /// + /// from which to create endpoint. + /// Rendezvous endpoint. + public static Rendezvous.Endpoint ToRendezvousEndpoint(this NetMQWriter writer) + { + // Each NetWriter is an endpoint emitting one or more topics/streams. + var endpoint = new Rendezvous.NetMQSourceEndpoint(writer.Address); + foreach (var (name, type) in writer.Topics) + { + endpoint.AddStream(new Rendezvous.Stream(name, type)); + } + + return endpoint; + } + + /// + /// Create a rendezvous endpoint from a . + /// + /// from which to create endpoint. + /// Host address with which to create endpoint. + /// Rendezvous endpoint. + public static Rendezvous.Endpoint ToRendezvousEndpoint(this RemoteClockExporter exporter, string host) + { + return new Rendezvous.RemoteClockExporterEndpoint(host, exporter.Port); + } + + /// + /// Create a from a . + /// + /// Type of data stream. + /// from which to create . + /// The pipeline to add the component to. + /// Topic name. + /// The deserializer to use to deserialize messages. + /// Flag indicating whether or not to post with originating times received over the socket. If false, we ignore them and instead use pipeline's current time. + /// . + public static NetMQSource ToNetMQSource(this Rendezvous.NetMQSourceEndpoint endpoint, Pipeline pipeline, string topic, IFormatDeserializer deserializer, bool useSourceOriginatingTimes = true) + { + return new NetMQSource(pipeline, topic, endpoint.Address, deserializer, useSourceOriginatingTimes); + } + + /// + /// Create a rendezvous endpoint from a . + /// + /// from which to create endpoint. + /// Host name with which to create endpoint. + /// Rendezvous endpoint. + public static Rendezvous.Endpoint ToRendezvousEndpoint(this RemoteExporter exporter, string host) + { + // Each RemoteExporter is an endpoint emitting one or more streams. + var endpoint = new Rendezvous.RemoteExporterEndpoint(host, exporter.Port, exporter.TransportKind); + foreach (var m in exporter.Exporter.Metadata) + { + endpoint.AddStream(new Rendezvous.Stream(m.Name, m.TypeName)); + } + + return endpoint; + } + + /// + /// Create a from a . + /// + /// from which to create . + /// The pipeline to add the component to. + /// . + public static RemoteImporter ToRemoteImporter(this Rendezvous.RemoteExporterEndpoint endpoint, Pipeline pipeline) + { + return new RemoteImporter(pipeline, endpoint.Host, endpoint.Port); + } + + /// + /// Create a from a . + /// + /// from which to create . + /// The pipeline to add the component to. + /// . 
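// Illustrative sketch: on the consuming side, a discovered TcpSourceEndpoint can be turned back into
// a live \psi stream. The float payload type and the use of JsonFormat as deserializer are assumptions.
var tcpSource = tcpEndpoint.ToTcpSource<float>(pipeline, JsonFormat.Instance);
tcpSource.Out.Do(value => Console.WriteLine($"received: {value}"));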
+ public static RemoteClockImporter ToRemoteClockImporter(this Rendezvous.RemoteClockExporterEndpoint endpoint, Pipeline pipeline) + { + return new RemoteClockImporter(pipeline, endpoint.Host, endpoint.Port); + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Readme.md b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Readme.md new file mode 100644 index 000000000..a2c561304 --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Readme.md @@ -0,0 +1,5 @@ +# Interop Rendezvous + +A distributed \psi system may have many separate pipelines running in separate processes, on separate machines, publishing and subscribing to streams being conveyed using various protocols. The rendezvous system allows each pipeline process to advertise its available streams and to discover those of other pipelines. This is accomplished by a centralized "rendezvous point" which maintains and relays endpoint connection and stream information. + +For more information, see [the Rendezvous System wiki page](https://github.com/microsoft/psi/wiki/Rendezvous-System). diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Rendezvous.cs b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Rendezvous.cs new file mode 100644 index 000000000..7e7a630d2 --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/Rendezvous.cs @@ -0,0 +1,402 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Interop.Rendezvous +{ + using System; + using System.Collections.Concurrent; + using System.Collections.Generic; + using System.Linq; + using Microsoft.Psi.Remoting; + + /// + /// Component that maintains rendezvous information. + /// + public class Rendezvous + { + /// + /// A rendezvous may know about many processes, each with many endpoints, each with many streams. + /// + private readonly ConcurrentDictionary processes = new (); + + private EventHandler processAdded; + + /// + /// Event raised when processes are added. + /// + /// Includes processes added before subscription. + public event EventHandler ProcessAdded + { + add + { + // inform late-joining handler of currently added processes + foreach (var p in this.Processes) + { + value.Invoke(this, p); + } + + this.processAdded += value; + } + + remove + { + this.processAdded -= value; + } + } + + /// + /// Event raised when processes are removed. + /// + public event EventHandler ProcessRemoved; + + /// + /// Gets the currently known processes. + /// + public IEnumerable Processes + { + get + { + return this.processes.Values; + } + } + + /// + /// Try to add a new process, if not already present. + /// + /// Process to add. + /// A value indicating whether the process was added. + public bool TryAddProcess(Process process) + { + if (this.processes.TryAdd(process.Name, process)) + { + this.processAdded?.Invoke(this, process); + return true; + } + + return false; + } + + /// + /// Try to remove a process if present. + /// + /// Process to remove. + /// A value indicating whether the process was removed. + public bool TryRemoveProcess(Process process) + { + if (this.processes.TryRemove(process.Name, out _)) + { + this.ProcessRemoved?.Invoke(this, process); + return true; + } + + return false; + } + + /// + /// Try to remove a process if present. + /// + /// Name of process to remove. + /// A value indicating whether the process was removed. 
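// Illustrative: ProcessAdded also replays processes that were added before the handler subscribed,
// so late-joining subscribers see the full set. The process name below is hypothetical.
var rendezvous = new Rendezvous();
rendezvous.ProcessAdded += (_, process) => Console.WriteLine($"process available: {process.Name}");
rendezvous.TryAddProcess(new Rendezvous.Process("HoloLensCaptureApp"));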
+ public bool TryRemoveProcess(string processName) + { + if (this.TryGetProcess(processName, out Process process)) + { + return this.TryRemoveProcess(process); + } + + return false; + } + + /// + /// Try to get process by name. + /// + /// Process name. + /// Process or null if not found. + /// A value indicating whether named process found. + public bool TryGetProcess(string processName, out Process process) + { + return this.processes.TryGetValue(processName, out process); + } + + /// + /// Represents a remoted stream of data. + /// + public class Stream + { + /// + /// Initializes a new instance of the class. + /// + /// Stream name. + /// Type name of stream data. + public Stream(string streamName, string typeName) + { + this.StreamName = streamName; + this.TypeName = typeName; + } + + /// + /// Initializes a new instance of the class. + /// + /// Stream name. + /// Type of stream data. + public Stream(string streamName, Type type) + : this(streamName, type.AssemblyQualifiedName) + { + } + + /// + /// Gets the stream name. + /// + public string StreamName { get; private set; } + + /// + /// Gets the type name of the stream data. + /// + public string TypeName { get; private set; } + } + + /// + /// Represents an endpoint providing remoted data streams. + /// + public abstract class Endpoint + { + private readonly ConcurrentDictionary streams; + + /// + /// Initializes a new instance of the class. + /// + /// Endpoint streams. + public Endpoint(IEnumerable streams) + { + this.streams = new ConcurrentDictionary(streams.Select(s => new KeyValuePair(s.StreamName, s))); + } + + /// + /// Initializes a new instance of the class. + /// + public Endpoint() + : this(Enumerable.Empty()) + { + } + + /// + /// Gets the streams. + /// + public IEnumerable Streams + { + get { return this.streams.Values; } + } + + /// + /// Add new stream. + /// + /// Endpoint stream to add. + public void AddStream(Stream stream) + { + this.streams.TryAdd(stream.StreamName, stream); + } + } + + /// + /// Represents a simple TCP source endpoint providing remoted data streams. + /// + public class TcpSourceEndpoint : Endpoint + { + /// + /// Initializes a new instance of the class. + /// + /// Host name used by the endpoint. + /// Port number used by the endpoint. + /// Endpoint streams. + public TcpSourceEndpoint(string host, int port, IEnumerable streams) + : base(streams) + { + if (string.IsNullOrEmpty(host)) + { + throw new ArgumentException("Host must be not null or empty."); + } + + this.Host = host; + this.Port = port; + } + + /// + /// Initializes a new instance of the class. + /// + /// Host name used by the endpoint. + /// Port number used by the endpoint. + public TcpSourceEndpoint(string host, int port) + : this(host, port, Enumerable.Empty()) + { + } + + /// + /// Gets the endpoint address. + /// + public string Host { get; private set; } + + /// + /// Gets the endpoint port number. + /// + public int Port { get; private set; } + } + + /// + /// Represents a NetMQ source endpoint providing remoted data streams. + /// + public class NetMQSourceEndpoint : Endpoint + { + /// + /// Initializes a new instance of the class. + /// + /// Address used by the endpoint. + /// Endpoint streams. + public NetMQSourceEndpoint(string address, IEnumerable streams) + : base(streams) + { + this.Address = address; + } + + /// + /// Initializes a new instance of the class. + /// + /// Address used by the endpoint. 
+ public NetMQSourceEndpoint(string address) + : this(address, Enumerable.Empty()) + { + } + + /// + /// Gets the endpoint address. + /// + public string Address { get; private set; } + } + + /// + /// Represents a remote exporter endpoint providing remoted data streams. + /// + public class RemoteExporterEndpoint : Endpoint + { + /// + /// Initializes a new instance of the class. + /// + /// Host name used by the endpoint. + /// Port used by the endpoint. + /// Tranport kind used by the endpoint. + /// Endpoint streams. + public RemoteExporterEndpoint(string host, int port, TransportKind transport, IEnumerable streams) + : base(streams) + { + this.Host = host; + this.Port = port; + this.Transport = transport; + } + + /// + /// Initializes a new instance of the class. + /// + /// Host name used by the endpoint. + /// Port used by the endpoint. + /// Tranport kind used by the endpoint. + public RemoteExporterEndpoint(string host, int port, TransportKind transport) + : this(host, port, transport, Enumerable.Empty()) + { + } + + /// + /// Gets the endpoint host name. + /// + public string Host { get; private set; } + + /// + /// Gets the endpoint port. + /// + public int Port { get; private set; } + + /// + /// Gets the endpoint transport kind. + /// + public TransportKind Transport { get; private set; } + } + + /// + /// Represents a remote clock exporter endpoint providing clock information. + /// + /// + /// Endpoint does not provide any streams. Clock information is exchanged directly. + /// + public class RemoteClockExporterEndpoint : Endpoint + { + /// + /// Initializes a new instance of the class. + /// + /// Host name used by the endpoint. + /// Port used by the endpoint. + public RemoteClockExporterEndpoint(string host, int port) + { + this.Host = host; + this.Port = port; + } + + /// + /// Gets the endpoint host name. + /// + public string Host { get; private set; } + + /// + /// Gets the endpoint port. + /// + public int Port { get; private set; } + } + + /// + /// Represents an application process hosting endpoints. + /// + public class Process + { + private readonly List endpoints; + + /// + /// Initializes a new instance of the class. + /// + /// Unique name by which to refer to the process. + /// Process endpoints. + public Process(string name, IEnumerable endpoints) + { + this.Name = name; + this.endpoints = endpoints.ToList(); + } + + /// + /// Initializes a new instance of the class. + /// + /// Unique name by which to refer to the process. + public Process(string name) + : this(name, Enumerable.Empty()) + { + } + + /// + /// Gets the process name. + /// + public string Name { get; private set; } + + /// + /// Gets the endpoints. + /// + public IEnumerable Endpoints + { + get { return this.endpoints; } + } + + /// + /// Add new endpoint. + /// + /// Process endpoint to add. + public void AddEndpoint(Endpoint endpoint) + { + this.endpoints.Add(endpoint); + } + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousClient.cs b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousClient.cs new file mode 100644 index 000000000..ed3b7aeaf --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousClient.cs @@ -0,0 +1,191 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
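
The endpoint and process types above are plain data records; a publishing process typically builds them from its live writers via the `ToRendezvousEndpoint` operators shown earlier and registers them with a `RendezvousClient`. The following is a minimal publisher-side sketch based only on the types added in this patch; the host names, port number, stream name, sample values, and the choice of `JsonFormat` are illustrative assumptions, not part of the patch.

```csharp
using (var p = Pipeline.Create())
{
    // publish a stream of doubles over TCP (port chosen arbitrarily for illustration)
    var writer = new TcpWriter<double>(p, "RandomValues", 11411, JsonFormat.Instance);

    // sample data source; any IProducer<double> could be piped to the writer
    Generators.Sequence(p, new[] { 1.0, 2.0, 3.0 }, TimeSpan.FromSeconds(1)).PipeTo(writer);

    // connect to the rendezvous server and advertise this process and its endpoint
    var rendezvousClient = new RendezvousClient("rendezvous-host");
    rendezvousClient.Start();
    rendezvousClient.Connected.WaitOne();

    var process = new Rendezvous.Process("publisher");
    process.AddEndpoint(writer.ToRendezvousEndpoint("publisher-host"));
    rendezvousClient.Rendezvous.TryAddProcess(process);

    p.Run();
}
```
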
+ +namespace Microsoft.Psi.Interop.Rendezvous +{ + using System; + using System.Diagnostics; + using System.IO; + using System.Net.Sockets; + using System.Threading; + + /// + /// Client which connects to a and relays information. + /// + public class RendezvousClient : RendezvousRelay, IDisposable + { + private readonly string serverAddress; + private readonly int port; + private readonly EventWaitHandle connected = new (false, EventResetMode.ManualReset); + + private TcpClient client; + private BinaryReader reader; + private BinaryWriter writer; + private bool active = false; + private string clientAddress = null; + + /// + /// Initializes a new instance of the class. + /// + /// TCP address to which to connect. + /// Optional TCP port to which to connect. + /// Optional rendezvous instance to relay. + public RendezvousClient(string serverAddress, int port = RendezvousServer.DefaultPort, Rendezvous rendezvous = null) + : base(rendezvous) + { + this.serverAddress = serverAddress; + this.port = port; + this.connected.Reset(); + } + + /// + /// Gets wait handle for server connection being established. + /// + /// This should be waited on prior to trusting the processes list. + public EventWaitHandle Connected => this.connected; + + /// + /// Gets a value indicating whether the client is active. + /// + public bool IsActive => this.active; + + /// + /// Gets the client address (available after connection established). + /// + public string ClientAddress => this.clientAddress; + + /// + /// Start rendezvous client (blocking until connection is established). + /// + public void Start() + { + if (this.active) + { + throw new Exception($"{nameof(RendezvousClient)} already started."); + } + + this.Rendezvous.ProcessAdded += this.ProcessAdded; + this.Rendezvous.ProcessRemoved += this.ProcessRemoved; + while (!this.active) + { + try + { + (this.client = new TcpClient()).Connect(this.serverAddress, this.port); + var stream = this.client.GetStream(); + this.reader = new BinaryReader(stream); + this.writer = new BinaryWriter(stream); + this.writer.Write(RendezvousServer.ProtocolVersion); + this.writer.Flush(); + this.active = true; + new Thread(new ThreadStart(this.ReadFromServer)) { IsBackground = true }.Start(); + } + catch (Exception ex) + { + Trace.WriteLine($"Failed to connect to {nameof(RendezvousServer)} (retrying): {ex.Message}"); + } + } + } + + /// + /// Stop rendezvous client. 
+ /// + public void Stop() + { + TryWriteDisconnect(this.writer); + this.Rendezvous.ProcessAdded -= this.ProcessAdded; + this.Rendezvous.ProcessRemoved -= this.ProcessRemoved; + this.active = false; + this.client?.Close(); + this.client?.Dispose(); + this.client = null; + this.reader?.Dispose(); + this.reader = null; + this.writer?.Dispose(); + this.writer = null; + } + + /// + public void Dispose() + { + this.Stop(); + this.connected.Dispose(); + } + + private void ReadFromServer() + { + try + { + var version = this.reader.ReadInt16(); + if (version != RendezvousServer.ProtocolVersion) + { + var ex = new IOException($"{nameof(RendezvousServer)} protocol mismatch ({version})"); + this.ServerError(ex); + throw ex; + } + + this.clientAddress = this.reader.ReadString(); + + // initialize processes before signaling connected + var count = this.reader.ReadInt32(); + for (var i = 0; i < count; i++) + { + if (!this.ReadProcessUpdate(this.reader)) + { + this.ServerError(new IOException($"{nameof(RendezvousServer)} disconnected.")); + } + } + + this.connected.Set(); + + do + { + if (!this.ReadProcessUpdate(this.reader)) + { + this.ServerError(new IOException($"{nameof(RendezvousServer)} disconnected.")); + } + } + while (this.active && this.client.Connected); + } + catch (Exception ex) + { + this.ServerError(ex); + this.connected.Reset(); + } + } + + private void NotifyServer(Rendezvous.Process process, Action action) + { + try + { + action(process, this.writer); + } + catch (Exception ex) + { + this.ServerError(ex); + } + } + + private void ProcessAdded(object sender, Rendezvous.Process process) + { + if (this.writer != null) + { + this.NotifyServer(process, WriteAddProcess); + } + } + + private void ProcessRemoved(object sender, Rendezvous.Process process) + { + if (this.writer != null) + { + this.NotifyServer(process, WriteRemoveProcess); + } + } + + private void ServerError(Exception ex) + { + Trace.WriteLine($"{nameof(RendezvousServer)} error: {ex.Message}"); + this.Stop(); + this.OnError(ex); + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousClient.py b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousClient.py new file mode 100644 index 000000000..0dfba5b92 --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousClient.py @@ -0,0 +1,209 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT license. + +import socket, struct, threading +from enum import IntEnum + +# Client which connects to a RendezvousServer and relays rendezvous information. +class RendezvousClient: + PROTOCOL_VERSION = 1 + + def __init__(self, host, port = 13331): + self.serverAddress = (host, port) + self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + + def __sendByte(self, b): + self.socket.send(struct.pack('b', b)) + + def __readByte(self): + b, = struct.unpack('b', self.socket.recv(1)) + return b + + def __sendInt(self, c): + self.socket.send(struct.pack(' + /// Base class for and . + /// + public abstract class RendezvousRelay + { + /// + /// Initializes a new instance of the class. + /// + /// Optional rendezvous instance to relay. + public RendezvousRelay(Rendezvous rendezvous = null) + { + this.Rendezvous = rendezvous ?? new Rendezvous(); + } + + /// + /// Event raised when errors occur. + /// + public event EventHandler Error; + + /// + /// Gets the underlying rendezvous. + /// + public Rendezvous Rendezvous { get; private set; } + + /// + /// Write update to add process. + /// + /// Process to add. 
+ /// Writer to which to write update. + protected static void WriteAddProcess(Rendezvous.Process process, BinaryWriter writer) + { + writer.Write((byte)1); // add + writer.Write(process.Name); + writer.Write(process.Endpoints.Count()); + foreach (var endpoint in process.Endpoints) + { + if (endpoint is Rendezvous.TcpSourceEndpoint tcpEndpoint) + { + writer.Write((byte)0); // TcpEndpoint + writer.Write(tcpEndpoint.Host); + writer.Write(tcpEndpoint.Port); + } + else if (endpoint is Rendezvous.NetMQSourceEndpoint netMQEndpoint) + { + writer.Write((byte)1); // NetMQEndpoint + writer.Write(netMQEndpoint.Address); + } + else if (endpoint is Rendezvous.RemoteExporterEndpoint remoteExporterEndpoint) + { + writer.Write((byte)2); // RemoteExporterEndpoint + writer.Write(remoteExporterEndpoint.Host); + writer.Write(remoteExporterEndpoint.Port); + writer.Write((int)remoteExporterEndpoint.Transport); + } + else if (endpoint is Rendezvous.RemoteClockExporterEndpoint remoteClockExporterEndpoint) + { + writer.Write((byte)3); // RemoteClockExporterEndpoint + writer.Write(remoteClockExporterEndpoint.Host); + writer.Write(remoteClockExporterEndpoint.Port); + } + else + { + throw new ArgumentException($"Unknown type of Endpoint ({endpoint.GetType().Name})."); + } + + writer.Write(endpoint.Streams.Count()); + foreach (var stream in endpoint.Streams) + { + writer.Write(stream.StreamName); + writer.Write(stream.TypeName); + } + } + + writer.Flush(); + } + + /// + /// Write update to remove process. + /// + /// Process to remove. + /// Writer to which to write update. + protected static void WriteRemoveProcess(Rendezvous.Process process, BinaryWriter writer) + { + writer.Write((byte)2); // remove + writer.Write(process.Name); + writer.Flush(); + } + + /// + /// Write disconnection signal.. + /// + /// Writer to which to write disconnection signal. + protected static void TryWriteDisconnect(BinaryWriter writer) + { + try + { + writer?.Write((byte)0); // disconnect + writer?.Flush(); + } + catch + { + } + } + + /// + /// Raise error event. + /// + /// Underlying exception. + protected void OnError(Exception ex) + { + this.Error?.Invoke(this, ex); + } + + /// + /// Read process update record. + /// + /// Reader from which to read. + /// A value indicating whether an update was read, otherwise false indicated disconnection. + protected bool ReadProcessUpdate(BinaryReader reader) + { + try + { + switch (reader.ReadByte()) + { + case 0: // disconnect + return false; + case 1: // add process + var process = ReadProcess(reader); + this.Rendezvous.TryAddProcess(process); + return true; + case 2: // remove process + var name = reader.ReadString(); + this.Rendezvous.TryRemoveProcess(name); + return true; + default: + throw new Exception("Unexpected rendezvous action."); + } + } + catch (Exception ex) + { + this.OnError(ex); + return false; + } + } + + /// + /// Read process. + /// + /// Reader from which to deserialize. + /// Process. 
+ private static Rendezvous.Process ReadProcess(BinaryReader reader) + { + var process = new Rendezvous.Process(reader.ReadString()); + + // read endpoint info + var endpointCount = reader.ReadInt32(); + for (var i = 0; i < endpointCount; i++) + { + Rendezvous.Endpoint endpoint; + switch (reader.ReadByte()) + { + case 0: // TcpEndpoint + var address = reader.ReadString(); + var port = reader.ReadInt32(); + endpoint = new Rendezvous.TcpSourceEndpoint(address, port); + break; + case 1: // NetMQEndpoint + endpoint = new Rendezvous.NetMQSourceEndpoint(reader.ReadString()); + break; + case 2: // RemoteExporterEndpoint + var host = reader.ReadString(); + port = reader.ReadInt32(); + var transport = (TransportKind)reader.ReadInt32(); + endpoint = new Rendezvous.RemoteExporterEndpoint(host, port, transport); + break; + case 3: // RemoteClockExporerEndpoint + host = reader.ReadString(); + port = reader.ReadInt32(); + endpoint = new Rendezvous.RemoteClockExporterEndpoint(host, port); + break; + default: + throw new Exception("Unknown type of Endpoint."); + } + + // read stream info + var streamCount = reader.ReadInt32(); + for (var j = 0; j < streamCount; j++) + { + var name = reader.ReadString(); + var typeName = reader.ReadString(); + endpoint.AddStream(new Rendezvous.Stream(name, typeName)); + } + + process.AddEndpoint(endpoint); + } + + return process; + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousServer.cs b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousServer.cs new file mode 100644 index 000000000..6f020b65e --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi.Interop/Rendezvous/RendezvousServer.cs @@ -0,0 +1,198 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Interop.Rendezvous +{ + using System; + using System.Collections.Concurrent; + using System.Diagnostics; + using System.IO; + using System.Linq; + using System.Net; + using System.Net.Sockets; + using System.Threading; + + /// + /// Server which accepts one or more connections and relays information. + /// + public class RendezvousServer : RendezvousRelay, IDisposable + { + /// + /// Default TCP port on which to listen for clients. + /// + public const int DefaultPort = 13331; + + /// + /// Protocol version. + /// + internal const short ProtocolVersion = 1; + + private readonly int port; + private readonly ConcurrentDictionary writers = new (); + + private TcpListener listener; + private bool active = false; + + /// + /// Initializes a new instance of the class. + /// + /// Optional TCP port on which to listen for clients. + /// Optional rendezvous instance to relay. + public RendezvousServer(int port = DefaultPort, Rendezvous rendezvous = null) + : base(rendezvous) + { + this.port = port; + } + + /// + /// Gets a value indicating whether the server is active. + /// + public bool IsActive => this.active; + + /// + /// Start rendezvous client (blocking until connection is established). 
+ /// + public void Start() + { + if (this.active) + { + throw new Exception($"{nameof(RendezvousServer)} already started."); + } + + this.Rendezvous.ProcessAdded += (_, process) => this.NotifyClients(process, WriteAddProcess); + this.Rendezvous.ProcessRemoved += (_, process) => this.NotifyClients(process, WriteRemoveProcess); + this.listener = new TcpListener(IPAddress.Any, this.port); + this.active = true; + new Thread(new ThreadStart(this.ListenForClients)) { IsBackground = true }.Start(); + } + + /// + /// Stop rendezvous client. + /// + public void Stop() + { + this.active = false; + + foreach (var writer in this.writers.Values) + { + TryWriteDisconnect(writer); + writer.Dispose(); + } + + this.listener?.Stop(); + this.listener = null; + } + + /// + public void Dispose() + { + this.Stop(); + } + + private void ListenForClients() + { + this.listener.Start(); + do + { + try + { + var client = this.listener.AcceptTcpClient(); + var remoteAddress = client.Client.RemoteEndPoint.ToString().Split(':')[0]; + var stream = client.GetStream(); + var reader = new BinaryReader(stream); + var version = reader.ReadInt16(); + if (version != ProtocolVersion) + { + var ex = new IOException($"{nameof(RendezvousClient)} protocol mismatch ({version})"); + this.ClientError(ex); + continue; + } + + var writer = new BinaryWriter(stream); + var guid = Guid.NewGuid(); + this.writers.TryAdd(guid, writer); + + writer.Write(ProtocolVersion); + writer.Write(remoteAddress); + writer.Write(this.Rendezvous.Processes.Count()); + writer.Flush(); + + // notify client of curent process info + foreach (var process in this.Rendezvous.Processes) + { + WriteAddProcess(process, writer); + } + + new Thread(new ParameterizedThreadStart(this.ReadFromClient)) { IsBackground = true } + .Start(Tuple.Create(reader, guid)); + } + catch (Exception ex) + { + this.ClientError(ex); + } + } + while (this.active && this.listener != null); + } + + private void ReadFromClient(object param) + { + var tuple = param as Tuple; + var reader = tuple.Item1; + var guid = tuple.Item2; + try + { + do + { + if (!this.ReadProcessUpdate(reader)) + { + Trace.WriteLine($"{nameof(RendezvousClient)} disconnected."); + break; + } + } + while (this.active && this.listener != null); + } + catch (Exception ex) + { + this.ClientError(ex); + } + + reader.Dispose(); + if (this.writers.TryRemove(guid, out var writer)) + { + writer.Dispose(); + } + } + + private void NotifyClients(Rendezvous.Process process, Action action) + { + foreach (var kv in this.writers) + { + var writer = kv.Value; + try + { + if (writer.BaseStream.CanWrite) + { + action(process, writer); + } + } + catch (Exception ex) + { + this.ClientError(ex); + if (this.writers.TryRemove(kv.Key, out _)) + { + writer.Dispose(); + } + } + } + } + + private void ClientError(Exception ex) + { + Trace.WriteLine($"{nameof(RendezvousClient)} failed to connect: {ex.Message}"); + if (this.active) + { + this.OnError(ex); // note: only invoked on first error + } + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Serialization/Format{T}.cs b/Sources/Runtime/Microsoft.Psi.Interop/Serialization/Format{T}.cs new file mode 100644 index 000000000..fb5a62b20 --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi.Interop/Serialization/Format{T}.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
+ +namespace Microsoft.Psi.Interop.Serialization +{ + using System; + using System.IO; + using System.Text; + + /// + /// Helper class for making new formats (implementations of /. + /// + /// Type which is serialized/deserialized. + public class Format : IFormatSerializer, IFormatDeserializer, IDisposable + { + private readonly Func serialize; + private readonly Func deserialize; + private readonly MemoryStream memoryStream = null; + + /// + /// Initializes a new instance of the class. + /// + /// Serialization function. + /// Deserialization function. + public Format( + Func serializeFunc, + Func deserializeFunc) + { + this.serialize = serializeFunc; + this.deserialize = deserializeFunc; + } + + /// + /// Initializes a new instance of the class. + /// + /// Action to serialize using . + /// Function to deserialize using (also given raw payload, offset, length). + /// Serialization format. + public Format( + Action serializeAction, + Func deserializeFunc) + { + this.memoryStream = new MemoryStream(); + + this.serialize = (val, originatingTime) => + { + this.memoryStream.Position = 0; + using var writer = new BinaryWriter(this.memoryStream, Encoding.UTF8, true); + writer.Write(originatingTime.ToFileTimeUtc()); + serializeAction(val, writer); + return (this.memoryStream.GetBuffer(), 0, (int)this.memoryStream.Length); + }; + + this.deserialize = (payload, offset, length) => + { + using var reader = new BinaryReader(new MemoryStream(payload, offset, length), Encoding.UTF8); + var originatingTime = DateTime.FromFileTimeUtc(reader.ReadInt64()); + var val = deserializeFunc(reader, payload, offset, length); + return (val, originatingTime); + }; + } + + /// + /// Initializes a new instance of the class. + /// + /// Action to serialize using . + /// Function to deserialize using (also given raw payload, offset, length). + /// Serialization format. + public Format( + Action serializeAction, + Func deserializeFunc) + : this(serializeAction, (reader, _, _, _) => deserializeFunc(reader)) + { + } + + /// + public (byte[] Bytes, int Index, int Count) SerializeMessage(dynamic message, DateTime originatingTime) + { + return this.serialize(message, originatingTime); + } + + /// + public (dynamic Message, DateTime OriginatingTime) DeserializeMessage(byte[] payload, int index, int count) + { + return this.deserialize(payload, index, count); + } + + /// + public void Dispose() + { + if (this.memoryStream != null) + { + this.memoryStream.Dispose(); + } + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Serialization/IFormatDeserializer.cs b/Sources/Runtime/Microsoft.Psi.Interop/Serialization/IFormatDeserializer.cs index 6f8e348b3..e614384c4 100644 --- a/Sources/Runtime/Microsoft.Psi.Interop/Serialization/IFormatDeserializer.cs +++ b/Sources/Runtime/Microsoft.Psi.Interop/Serialization/IFormatDeserializer.cs @@ -17,6 +17,6 @@ public interface IFormatDeserializer /// Starting index of message data. /// Number of bytes constituting message data. /// Dynamic of primitive or IEnumerable/ExpandoObject of primitive as well as originating time stamp. 
- (dynamic, DateTime) DeserializeMessage(byte[] payload, int index, int count); + (dynamic Message, DateTime OriginatingTime) DeserializeMessage(byte[] payload, int index, int count); } } \ No newline at end of file diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Serialization/IFormatSerializer.cs b/Sources/Runtime/Microsoft.Psi.Interop/Serialization/IFormatSerializer.cs index 365dd9db3..048b79821 100644 --- a/Sources/Runtime/Microsoft.Psi.Interop/Serialization/IFormatSerializer.cs +++ b/Sources/Runtime/Microsoft.Psi.Interop/Serialization/IFormatSerializer.cs @@ -16,6 +16,6 @@ public interface IFormatSerializer /// Message of any type. /// Originating time of message. /// Serialized bytes, index and count. - (byte[], int, int) SerializeMessage(dynamic message, DateTime originatingTime); + (byte[] Bytes, int Index, int Count) SerializeMessage(dynamic message, DateTime originatingTime); } } diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Serialization/PersistentFormat.cs b/Sources/Runtime/Microsoft.Psi.Interop/Serialization/PersistentFormat.cs new file mode 100644 index 000000000..d61f84b4b --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi.Interop/Serialization/PersistentFormat.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Interop.Serialization +{ + using System; + using System.Collections.Generic; + using System.IO; + + /// + /// Persistent format serializer/deserializer. + /// + /// Type which is serialized/deserialized. + public class PersistentFormat : IPersistentFormatSerializer, IPersistentFormatDeserializer + { + private readonly Func persistHeader; + private readonly Action persistRecord; + private readonly Action persistFooter; + private readonly Func> deserializeRecords; + + /// + /// Initializes a new instance of the class. + /// + /// Header persistence function. + /// Record persistence function. + /// Footer persistence function. + /// Deserialization function. + public PersistentFormat( + Func persistHeader, + Action persistRecord, + Action persistFooter, + Func> deserializeRecords) + { + this.persistHeader = persistHeader; + this.persistRecord = persistRecord; + this.persistFooter = persistFooter; + this.deserializeRecords = deserializeRecords; + } + + /// + public dynamic PersistHeader(dynamic message, Stream stream) + { + return this.persistHeader(message, stream); + } + + /// + public void PersistRecord(dynamic message, DateTime originatingTime, bool first, Stream stream, dynamic state) + { + this.persistRecord(message, originatingTime, first, stream, state); + } + + /// + public void PersistFooter(Stream stream, dynamic state) + { + throw new NotImplementedException(); + } + + /// + public IEnumerable<(dynamic, DateTime)> DeserializeRecords(Stream stream) + { + throw new NotImplementedException(); + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Transport/FileSource.cs b/Sources/Runtime/Microsoft.Psi.Interop/Transport/FileSource.cs index d8bb61e0b..9e3eab022 100644 --- a/Sources/Runtime/Microsoft.Psi.Interop/Transport/FileSource.cs +++ b/Sources/Runtime/Microsoft.Psi.Interop/Transport/FileSource.cs @@ -19,7 +19,7 @@ public class FileSource : Generator /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// File name to which to persist. /// Format serializer with which messages are deserialized. 
public FileSource(Pipeline pipeline, string filename, IPersistentFormatDeserializer deserializer) diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Transport/FileWriter.cs b/Sources/Runtime/Microsoft.Psi.Interop/Transport/FileWriter.cs index 421c9ac2e..41bbd9038 100644 --- a/Sources/Runtime/Microsoft.Psi.Interop/Transport/FileWriter.cs +++ b/Sources/Runtime/Microsoft.Psi.Interop/Transport/FileWriter.cs @@ -20,7 +20,7 @@ public class FileWriter : IConsumer, IDisposable /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// File name to which to persist. /// Format serializer with which messages are serialized. public FileWriter(Pipeline pipeline, string filename, IPersistentFormatSerializer serializer) diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQSource.cs b/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQSource.cs index c647d5a96..01bb9f3dc 100644 --- a/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQSource.cs +++ b/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQSource.cs @@ -20,7 +20,7 @@ public class NetMQSource : IProducer, ISourceComponent, IDisposable private readonly string address; private readonly IFormatDeserializer deserializer; private readonly Pipeline pipeline; - private readonly bool useReceivedTimes; + private readonly bool useSourceOriginatingTimes; private SubscriberSocket socket; private NetMQPoller poller; @@ -28,15 +28,15 @@ public class NetMQSource : IProducer, ISourceComponent, IDisposable /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Topic name. /// Connection string. /// Format deserializer with which messages are deserialized. - /// Flag indicating whether or not to post with originating times received over the socket. If false, we ignore them and instead use pipeline's current time. - public NetMQSource(Pipeline pipeline, string topic, string address, IFormatDeserializer deserializer, bool useReceivedTimes = true) + /// Flag indicating whether or not to post with originating times received over the socket. If false, we ignore them and instead use pipeline's current time. + public NetMQSource(Pipeline pipeline, string topic, string address, IFormatDeserializer deserializer, bool useSourceOriginatingTimes = true) { this.pipeline = pipeline; - this.useReceivedTimes = useReceivedTimes; + this.useSourceOriginatingTimes = useSourceOriginatingTimes; this.topic = topic; this.address = address; this.deserializer = deserializer; @@ -106,7 +106,7 @@ private void ReceiveReady(object sender, NetMQSocketEventArgs e) } var (message, originatingTime) = this.deserializer.DeserializeMessage(frames[1], 0, frames[1].Length); - this.Out.Post(message, this.useReceivedTimes ? originatingTime : this.pipeline.GetCurrentTime()); + this.Out.Post(message, this.useSourceOriginatingTimes ? 
originatingTime : this.pipeline.GetCurrentTime()); } } } diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQWriter.cs b/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQWriter.cs index 7a2b61590..c20001086 100644 --- a/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQWriter.cs +++ b/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQWriter.cs @@ -4,6 +4,8 @@ namespace Microsoft.Psi.Interop.Transport { using System; + using System.Collections.Generic; + using System.Linq; using Microsoft.Psi.Interop.Serialization; using NetMQ; using NetMQ.Sockets; @@ -15,23 +17,38 @@ public class NetMQWriter : IDisposable { private readonly Pipeline pipeline; private readonly IFormatSerializer serializer; + private readonly Dictionary topics = new (); private PublisherSocket socket; /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Connection string. /// Format serializer with which messages are serialized. public NetMQWriter(Pipeline pipeline, string address, IFormatSerializer serializer) { this.pipeline = pipeline; + this.Address = address; this.serializer = serializer; this.socket = new PublisherSocket(); pipeline.PipelineRun += (s, e) => this.socket.Bind(address); } + /// + /// Gets the connection address string. + /// + public string Address { get; private set; } + + /// + /// Gets the topic names and types being published. + /// + public IEnumerable<(string Name, Type Type)> Topics + { + get { return this.topics.Select(x => (x.Key, x.Value)); } + } + /// /// Add topic receiver. /// @@ -40,6 +57,7 @@ public NetMQWriter(Pipeline pipeline, string address, IFormatSerializer serializ /// Receiver to which to pipe messages. public Receiver AddTopic(string topic) { + this.topics.Add(topic, typeof(T)); return this.pipeline.CreateReceiver(this, (m, e) => this.Receive(m, e, topic), topic); } diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQWriter{T}.cs b/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQWriter{T}.cs index f4e925ad9..b15f5a2dc 100644 --- a/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQWriter{T}.cs +++ b/Sources/Runtime/Microsoft.Psi.Interop/Transport/NetMQWriter{T}.cs @@ -17,7 +17,7 @@ public class NetMQWriter : NetMQWriter, IConsumer /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Topic name. /// Connection string. /// Format serializer with which messages are serialized. 
diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Transport/Readme.md b/Sources/Runtime/Microsoft.Psi.Interop/Transport/Readme.md index 99a1d7ef3..873fd50e0 100644 --- a/Sources/Runtime/Microsoft.Psi.Interop/Transport/Readme.md +++ b/Sources/Runtime/Microsoft.Psi.Interop/Transport/Readme.md @@ -125,7 +125,7 @@ import zmq, random, datetime, json context = zmq.Context() socket = context.socket(zmq.PUB) -socket.bind('tcp://127.0.0.1:12345') +socket.bind('tcp://127.0.0.1:45678') while True: payload = {} @@ -147,7 +147,7 @@ The stream of random doubles can then be easily consumed in Psi: ```csharp using (var p = Pipeline.Create()) { - var mq = new NetMQSource(p, "test-topic", "tcp://localhost:12345", JsonFormat.Instance); + var mq = new NetMQSource(p, "test-topic", "tcp://localhost:45678", JsonFormat.Instance); mq.Do(x => Console.WriteLine($"Message: {x}")); p.Run(); } diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Transport/TcpSource.cs b/Sources/Runtime/Microsoft.Psi.Interop/Transport/TcpSource.cs new file mode 100644 index 000000000..514302b59 --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi.Interop/Transport/TcpSource.cs @@ -0,0 +1,161 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Interop.Transport +{ + using System; + using System.Diagnostics; + using System.IO; + using System.Net.Sockets; + using System.Threading; + using Microsoft.Psi; + using Microsoft.Psi.Components; + using Microsoft.Psi.Interop.Serialization; + + /// + /// Component that reads and deserializes messages from a remote server over TCP. + /// + /// The type of the messages. + public class TcpSource : IProducer, ISourceComponent, IDisposable + { + private static readonly bool IsDisposableT = typeof(IDisposable).IsAssignableFrom(typeof(T)); + private readonly Pipeline pipeline; + private readonly string address; + private readonly int port; + private readonly string name; + private readonly TcpClient client; + private readonly IFormatDeserializer deserializer; + private readonly bool useSourceOriginatingTimes; + private Thread readerThread; + private Action completed; + private DateTime endTime = DateTime.MaxValue; + private byte[] frameBuffer; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// The address of the remote server. + /// The port on which to connect. + /// The deserializer to use to deserialize messages. + /// An optional parameter indicating whether to use originating times from the source received over the network or to re-timestamp with the current pipeline time upon receiving. + /// An optional name for the TCP source. 
+ public TcpSource(Pipeline pipeline, string address, int port, IFormatDeserializer deserializer, bool useSourceOriginatingTimes = true, string name = null) + { + this.pipeline = pipeline; + this.client = new TcpClient(); + this.address = address; + this.port = port; + this.deserializer = deserializer; + this.useSourceOriginatingTimes = useSourceOriginatingTimes; + this.name = name; + this.Out = pipeline.CreateEmitter(this, nameof(this.Out)); + } + + /// + public Emitter Out { get; } + + /// + public void Dispose() => this.client.Close(); + + /// + public void Start(Action notifyCompletionTime) + { + this.completed = notifyCompletionTime; + this.readerThread = new Thread(this.ReadFrames); + this.readerThread.Start(); + } + + /// + public void Stop(DateTime finalOriginatingTime, Action notifyCompleted) + { + this.endTime = finalOriginatingTime; + + // ensures that any pending connection attempt is terminated + this.client.Close(); + + this.readerThread.Join(); + notifyCompleted(); + } + + /// + public override string ToString() => this.name ?? base.ToString(); + + /// + /// Reads a data frame into the frame buffer. Will re-allocate the frame buffer if necessary. + /// + /// The binary reader. + private (dynamic, DateTime) ReadNextFrame(BinaryReader binaryReader) + { + int frameLength = binaryReader.ReadInt32(); + + // ensure that the frame buffer is large enough to accommodate the next frame + if (this.frameBuffer == null || this.frameBuffer.Length < frameLength) + { + this.frameBuffer = new byte[frameLength]; + } + + // read the entire frame into the frame buffer + int bytesRead = binaryReader.Read(this.frameBuffer, 0, frameLength); + while (bytesRead < frameLength) + { + bytesRead += binaryReader.Read(this.frameBuffer, bytesRead, frameLength - bytesRead); + } + + // deserialize the frame bytes into (T, DateTime) + (var data, var originatingTime) = this.deserializer.DeserializeMessage(this.frameBuffer, 0, frameLength); + return this.useSourceOriginatingTimes ? (data, originatingTime) : (data, this.pipeline.GetCurrentTime()); + } + + private void ReadFrames() + { + // ensure that we don't read past the end of the pipeline replay descriptor + this.endTime = this.Out.Pipeline.ReplayDescriptor.End; + + var lastTimestamp = DateTime.MinValue; + + var connected = false; + while (!connected) + { + try + { + Trace.WriteLine($"Attempting to connect to {this.address}:{this.port}"); + this.client.Connect(this.address, this.port); + Trace.WriteLine($"Connected to {this.address}:{this.port}."); + connected = true; + } + catch + { + Trace.WriteLine($"Failed to connect to port {this.address}:{this.port}. 
Retrying ..."); + } + } + + try + { + using var reader = new BinaryReader(this.client.GetStream()); + + // read and deserialize frames from the stream reader + for (var (message, timestamp) = this.ReadNextFrame(reader); + timestamp <= this.endTime; + lastTimestamp = timestamp, (message, timestamp) = this.ReadNextFrame(reader)) + { + this.Out.Post(message, timestamp); + + if (IsDisposableT) + { + // message is deep-cloned on Post, so dispose it if IDisposable + ((IDisposable)message).Dispose(); + } + } + } + catch + { + } + finally + { + // completion time is last posted message timestamp + this.completed?.Invoke(lastTimestamp); + } + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi.Interop/Transport/TcpWriter.cs b/Sources/Runtime/Microsoft.Psi.Interop/Transport/TcpWriter.cs new file mode 100644 index 000000000..601506f0e --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi.Interop/Transport/TcpWriter.cs @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Interop.Transport +{ + using System; + using System.Diagnostics; + using System.Net; + using System.Net.Sockets; + using System.Threading; + using Microsoft.Psi; + using Microsoft.Psi.Interop.Serialization; + + /// + /// Component that serializes and writes messages to a remote server over TCP. + /// + /// The type of the messages. + public class TcpWriter : IConsumer, IDisposable + { + private readonly IFormatSerializer serializer; + + private TcpListener listener; + private NetworkStream networkStream; + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// Name by which to refer to the data stream. + /// The connection port. + /// The serializer to use to serialize messages. + public TcpWriter(Pipeline pipeline, string name, int port, IFormatSerializer serializer) + { + this.serializer = serializer; + this.Name = name; + this.Port = port; + this.In = pipeline.CreateReceiver(this, this.Receive, nameof(this.In)); + this.listener = new TcpListener(IPAddress.Any, port); + this.Start(); + } + + /// + /// Gets the name by which to refer to the data stream. + /// + public string Name { get; private set; } + + /// + /// Gets the connection port. 
+ /// + public int Port { get; private set; } + + /// + public Receiver In { get; } + + /// + public void Dispose() + { + this.networkStream?.Dispose(); + this.listener.Stop(); + this.listener = null; + } + + private void Receive(T message, Envelope envelope) + { + (var bytes, int offset, int count) = this.serializer.SerializeMessage(message, envelope.OriginatingTime); + + try + { + if (this.networkStream != null) + { + this.networkStream.Write(BitConverter.GetBytes(count), 0, sizeof(int)); + this.networkStream.Write(bytes, offset, count); + } + } + catch (Exception ex) + { + Trace.WriteLine($"TcpWriter Exception: {ex.Message}"); + this.listener.Stop(); + this.networkStream.Dispose(); + this.networkStream = null; + this.Start(); + } + } + + private void Start() + { + new Thread(new ThreadStart(this.Listen)) { IsBackground = true }.Start(); + } + + private void Listen() + { + if (this.listener != null) + { + try + { + this.listener.Start(); + this.networkStream = this.listener.AcceptTcpClient().GetStream(); + } + catch (Exception ex) + { + Trace.WriteLine($"TcpWriter Exception: {ex.Message}"); + } + } + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi/Common/DeliveryPolicy{T}.cs b/Sources/Runtime/Microsoft.Psi/Common/DeliveryPolicy{T}.cs index aadf02395..559cc6375 100644 --- a/Sources/Runtime/Microsoft.Psi/Common/DeliveryPolicy{T}.cs +++ b/Sources/Runtime/Microsoft.Psi/Common/DeliveryPolicy{T}.cs @@ -82,7 +82,7 @@ public class DeliveryPolicy public static implicit operator DeliveryPolicy(DeliveryPolicy policy) { return policy == null ? null : new DeliveryPolicy( - DeliveryPolicy.DefaultInitialQueueSize, + policy.InitialQueueSize, policy.MaximumQueueSize, policy.MaximumLatency, policy.ThrottleWhenFull, diff --git a/Sources/Runtime/Microsoft.Psi/Common/Interpolators/Interpolator{T}.cs b/Sources/Runtime/Microsoft.Psi/Common/Interpolators/Interpolator{T}.cs new file mode 100644 index 000000000..850f14f59 --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi/Common/Interpolators/Interpolator{T}.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi +{ + using System; + + /// + /// Defines a stream interpolator with the same input and output type. + /// + /// The type of the input messages and of the result. + public abstract class Interpolator : Interpolator + { + /// + /// Implicitly convert relative time intervals to the equivalent of a reproducible nearest match within that window. + /// + /// Window within which to match messages. + public static implicit operator Interpolator(RelativeTimeInterval window) => Reproducible.Nearest(window); + + /// + /// Implicitly convert timespan to the equivalent of a reproducible nearest match with that tolerance. + /// + /// Relative window tolerance within which to match messages. 
+ public static implicit operator Interpolator(TimeSpan tolerance) => Reproducible.Nearest(tolerance); + } +} diff --git a/Sources/Runtime/Microsoft.Psi/Common/Interpolators/ReproducibleInterpolator{T}.cs b/Sources/Runtime/Microsoft.Psi/Common/Interpolators/ReproducibleInterpolator{T}.cs index 09ce661da..bce8c2b81 100644 --- a/Sources/Runtime/Microsoft.Psi/Common/Interpolators/ReproducibleInterpolator{T}.cs +++ b/Sources/Runtime/Microsoft.Psi/Common/Interpolators/ReproducibleInterpolator{T}.cs @@ -14,24 +14,18 @@ namespace Microsoft.Psi /// these interpolators might introduce extra delays as they might have to wait for enough messages on the /// secondary stream to prove that the interpolation result is correct, irrespective of any other messages /// that might arrive later. - public abstract class ReproducibleInterpolator : ReproducibleInterpolator + public abstract class ReproducibleInterpolator : Interpolator { /// /// Implicitly convert relative time intervals to the equivalent of a reproducible nearest match within that window. /// /// Window within which to match messages. - public static implicit operator ReproducibleInterpolator(RelativeTimeInterval window) - { - return Reproducible.Nearest(window); - } + public static implicit operator ReproducibleInterpolator(RelativeTimeInterval window) => Reproducible.Nearest(window); /// /// Implicitly convert timespan to the equivalent of a reproducible nearest match with that tolerance. /// /// Relative window tolerance within which to match messages. - public static implicit operator ReproducibleInterpolator(TimeSpan tolerance) - { - return Reproducible.Nearest(tolerance); - } + public static implicit operator ReproducibleInterpolator(TimeSpan tolerance) => Reproducible.Nearest(tolerance); } } diff --git a/Sources/Runtime/Microsoft.Psi/Common/Platform.cs b/Sources/Runtime/Microsoft.Psi/Common/Platform.cs index 48139e21a..27c51a3ad 100644 --- a/Sources/Runtime/Microsoft.Psi/Common/Platform.cs +++ b/Sources/Runtime/Microsoft.Psi/Common/Platform.cs @@ -5,8 +5,11 @@ namespace Microsoft.Psi { using System; using System.Diagnostics; + using System.IO; using System.Runtime.InteropServices; + using System.Text; using System.Threading; + using Microsoft.Win32.SafeHandles; /// /// Internal class to hold native P/Invoke methods. @@ -36,24 +39,33 @@ internal interface IThreading void SetApartmentState(Thread thread, ApartmentState state); } + internal interface IFileHelper + { + bool CanOpenFile(string filePath); + } + #pragma warning restore SA1600 // Elements must be documented public static class Specific { private static readonly IHighResolutionTime PlatformHighResolutionTime; private static readonly IThreading PlatformThreading; + private static readonly IFileHelper FileHelper; static Specific() { if (Environment.OSVersion.Platform == PlatformID.Win32NT) { + // Windows high-resolution timer APIs (e.g. 
TimeSetEvent in winmm.dll) are unavaliable on ARM PlatformHighResolutionTime = new Windows.HighResolutionTime(); PlatformThreading = new Windows.Threading(); + FileHelper = new Windows.FileHelper(); } else { PlatformHighResolutionTime = new Standard.HighResolutionTime(); PlatformThreading = new Standard.Threading(); + FileHelper = new Standard.FileHelper(); } } @@ -81,6 +93,11 @@ internal static long TimeFrequency() { return PlatformHighResolutionTime.TimeFrequency(); } + + internal static bool CanOpenFile(string filePath) + { + return FileHelper.CanOpenFile(filePath); + } } private static class Windows @@ -111,6 +128,8 @@ public void Stop() internal sealed class HighResolutionTime : IHighResolutionTime { + private bool isArm = RuntimeInformation.OSArchitecture == Architecture.Arm || RuntimeInformation.OSArchitecture == Architecture.Arm64; + public long TimeStamp() { long time; @@ -142,7 +161,10 @@ public long SystemTime() public ITimer TimerStart(uint delay, Time.TimerDelegate handler, bool periodic) { - return new Timer(delay, handler, periodic); + return + this.isArm ? + new Standard.Timer(delay, handler, periodic) : // TimeSet/KillEvent API unavailable on ARM + new Timer(delay, handler, periodic); } } @@ -154,6 +176,27 @@ public void SetApartmentState(Thread thread, ApartmentState state) } } + internal sealed class FileHelper : IFileHelper + { + public bool CanOpenFile(string filePath) + { + // Try to open the marker file using Win32 api so that we don't + // get an exception if the writer still has exclusive access. + SafeFileHandle fileHandle = NativeMethods.CreateFile( + filePath, + 0x80000000, // GENERIC_READ + (uint)FileShare.Read, + IntPtr.Zero, + (uint)FileMode.Open, + 0, + IntPtr.Zero); + + bool canOpenFile = !fileHandle.IsInvalid; + fileHandle.Dispose(); + return canOpenFile; + } + } + private static class NativeMethods { [DllImport("winmm.dll", SetLastError = true, EntryPoint = "timeSetEvent")] @@ -170,6 +213,16 @@ private static class NativeMethods [DllImport("kernel32.dll")] internal static extern bool QueryPerformanceFrequency(out long frequency); + + [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)] + internal static extern SafeFileHandle CreateFile( + string fileName, + uint desiredAccess, + uint shareMode, + IntPtr securityAttributes, + uint creationDisposition, + uint flagsAndAttributes, + IntPtr templateFile); } } @@ -291,6 +344,39 @@ public void SetApartmentState(Thread thread, ApartmentState state) // do nothing (COM feature) } } + + internal sealed class FileHelper : IFileHelper + { + public bool CanOpenFile(string filePath) + { + // Encode the file path to a null terminated UTF8 string + byte[] encodedBytes = new byte[Encoding.UTF8.GetByteCount(filePath) + 1]; + Encoding.UTF8.GetBytes(filePath, 0, filePath.Length, encodedBytes, 0); + + // Try to open the file + int fileDescriptor = NativeMethods.Open(encodedBytes, 0); + + // If a valid file descriptor is returned (not -1), then the file was successfully opened. 
+ bool canOpenFile = fileDescriptor > -1; + + // Close the file if we managed to open it + if (canOpenFile) + { + NativeMethods.Close(fileDescriptor); + } + + return canOpenFile; + } + } + + private static class NativeMethods + { + [DllImport("libc", SetLastError = true, EntryPoint = "open")] + public static extern int Open([MarshalAs(UnmanagedType.LPArray, ArraySubType = UnmanagedType.U1)] byte[] fileNameAsUtf8ByteArray, int flags); + + [DllImport("libc", SetLastError = true, EntryPoint = "close")] + public static extern int Close(int fileDescriptor); + } } } } diff --git a/Sources/Runtime/Microsoft.Psi/Common/RuntimeInfo.cs b/Sources/Runtime/Microsoft.Psi/Common/RuntimeInfo.cs index 6957ff07f..d068f6a9c 100644 --- a/Sources/Runtime/Microsoft.Psi/Common/RuntimeInfo.cs +++ b/Sources/Runtime/Microsoft.Psi/Common/RuntimeInfo.cs @@ -34,7 +34,7 @@ public class RuntimeInfo : Metadata /// /// Gets the current runtime info. /// - public static readonly RuntimeInfo Current = new RuntimeInfo(); + public static readonly RuntimeInfo Current = new (); internal RuntimeInfo(int serializationSystemVersion = CurrentRuntimeVersion) : this( diff --git a/Sources/Runtime/Microsoft.Psi/Common/Shared.cs b/Sources/Runtime/Microsoft.Psi/Common/Shared.cs index 8c2b2c178..05149b3e6 100644 --- a/Sources/Runtime/Microsoft.Psi/Common/Shared.cs +++ b/Sources/Runtime/Microsoft.Psi/Common/Shared.cs @@ -115,7 +115,7 @@ private Shared() this.inner.Release(); #if TRACKLEAKS - StringBuilder sb = new StringBuilder("\\psi output **********************************************"); + var sb = new StringBuilder("\\psi output **********************************************"); sb.AppendLine(); sb.AppendLine($"A shared resource of type {typeof(T).FullName} was not explicitly released and has been garbage-collected. It should be released by calling Dispose instead."); if (this.constructorStackTrace != null) @@ -175,17 +175,16 @@ public void Dispose() /// Shared resource. public Shared AddRef() { - Shared sh = new Shared(); - sh.inner = this.inner; + var shared = new Shared + { + inner = this.inner, + }; this.inner.AddRef(); - return sh; + return shared; } /// - public override string ToString() - { - return this.ToString(string.Empty, CultureInfo.CurrentCulture); - } + public override string ToString() => this.ToString(string.Empty, CultureInfo.CurrentCulture); /// public string ToString(string format, IFormatProvider formatProvider) @@ -212,6 +211,9 @@ private class CustomSerializer : ISerializer> public const int Version = 2; private SerializationHandler> handler; + /// + public bool? IsClearRequired => true; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { this.handler = serializers.GetHandler>(); diff --git a/Sources/Runtime/Microsoft.Psi/Common/SharedContainer.cs b/Sources/Runtime/Microsoft.Psi/Common/SharedContainer.cs index e2bf60cc5..6ac10201c 100644 --- a/Sources/Runtime/Microsoft.Psi/Common/SharedContainer.cs +++ b/Sources/Runtime/Microsoft.Psi/Common/SharedContainer.cs @@ -74,6 +74,9 @@ private class CustomSerializer : ISerializer> public const int Version = 2; private SerializationHandler handler; + /// + public bool? 
IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { this.handler = serializers.GetHandler(); diff --git a/Sources/Runtime/Microsoft.Psi/Common/UnmanagedArray.cs b/Sources/Runtime/Microsoft.Psi/Common/UnmanagedArray.cs index 976ec8865..4852e48f6 100644 --- a/Sources/Runtime/Microsoft.Psi/Common/UnmanagedArray.cs +++ b/Sources/Runtime/Microsoft.Psi/Common/UnmanagedArray.cs @@ -527,6 +527,9 @@ private class CustomSerializer : ISerializer> { private const int Version = 1; + /// + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { serializers.GetHandler(); // register element type diff --git a/Sources/Runtime/Microsoft.Psi/Common/UnmanagedBuffer.cs b/Sources/Runtime/Microsoft.Psi/Common/UnmanagedBuffer.cs index f3c1a9739..5a8fc09f4 100644 --- a/Sources/Runtime/Microsoft.Psi/Common/UnmanagedBuffer.cs +++ b/Sources/Runtime/Microsoft.Psi/Common/UnmanagedBuffer.cs @@ -274,6 +274,9 @@ private class CustomSerializer : ISerializer { public const int Version = 2; + /// + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { serializers.GetHandler(); // register element type diff --git a/Sources/Runtime/Microsoft.Psi/Components/Aggregator.cs b/Sources/Runtime/Microsoft.Psi/Components/Aggregator.cs index 3ff776367..107d2b6a4 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Aggregator.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Aggregator.cs @@ -19,7 +19,7 @@ public class Aggregator : ConsumerProducer, IDispo /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Initial state. /// Aggregation function. public Aggregator(Pipeline pipeline, TState init, Func, TState> aggregator) diff --git a/Sources/Runtime/Microsoft.Psi/Components/AsyncConsumerProducer.cs b/Sources/Runtime/Microsoft.Psi/Components/AsyncConsumerProducer.cs index 8de93056a..5dcadbeb1 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/AsyncConsumerProducer.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/AsyncConsumerProducer.cs @@ -16,7 +16,7 @@ public abstract class AsyncConsumerProducer : IConsumerProducer /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. public AsyncConsumerProducer(Pipeline pipeline) { this.Out = pipeline.CreateEmitter(this, nameof(this.Out)); diff --git a/Sources/Runtime/Microsoft.Psi/Components/ConsumerProducer.cs b/Sources/Runtime/Microsoft.Psi/Components/ConsumerProducer.cs index cc4b90e5f..f8be5e949 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/ConsumerProducer.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/ConsumerProducer.cs @@ -16,7 +16,7 @@ public abstract class ConsumerProducer : IConsumerProducer /// /// Initializes a new instance of the class. /// - /// The pipeline to attach to. + /// The pipeline to add the component to. 
public ConsumerProducer(Pipeline pipeline) { this.Out = pipeline.CreateEmitter(this, nameof(this.Out)); diff --git a/Sources/Runtime/Microsoft.Psi/Components/EventSource.cs b/Sources/Runtime/Microsoft.Psi/Components/EventSource.cs index ca2ad2791..12a223d1c 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/EventSource.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/EventSource.cs @@ -25,7 +25,7 @@ public class EventSource : IProducer, ISourceComponen /// action delegate into an event handler compatible with the external /// event that is being subscribed to. /// - /// The Psi pipeline. + /// The pipeline to add the component to. /// The delegate that subscribes to the external event. /// The delegate that unsubscribes from the external event. /// diff --git a/Sources/Runtime/Microsoft.Psi/Components/Fuse.cs b/Sources/Runtime/Microsoft.Psi/Components/Fuse.cs index bd994f91d..cce4de8d2 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Fuse.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Fuse.cs @@ -34,7 +34,7 @@ public class Fuse : IProducer /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Interpolator to use when joining the streams. /// Mapping function from messages to output. /// Number of secondary streams. diff --git a/Sources/Runtime/Microsoft.Psi/Components/Generator.cs b/Sources/Runtime/Microsoft.Psi/Components/Generator.cs index 8ffecca6f..c5ef1e331 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Generator.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Generator.cs @@ -39,7 +39,7 @@ public abstract class Generator : ISourceComponent /// /// Initializes a new instance of the class. /// - /// The pipeline to attach to. + /// The pipeline to add the component to. /// If true, mark this Generator instance as representing an infinite source (e.g., a live-running sensor). /// If false (default), it represents a finite source (e.g., Generating messages based on a finite file or IEnumerable). public Generator(Pipeline pipeline, bool isInfiniteSource = false) diff --git a/Sources/Runtime/Microsoft.Psi/Components/Generator{T}.cs b/Sources/Runtime/Microsoft.Psi/Components/Generator{T}.cs index 211437eba..c85952710 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Generator{T}.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Generator{T}.cs @@ -23,7 +23,7 @@ public class Generator : Generator, IProducer, IDisposable /// /// Initializes a new instance of the class. /// - /// The pipeline to attach to. + /// The pipeline to add the component to. /// A lazy enumerator of data. /// The interval used to increment time on each generated message. /// If non-null, this parameter specifies a time to align the generator messages with. If the parameter @@ -38,7 +38,7 @@ public Generator(Pipeline pipeline, IEnumerator enumerator, TimeSpan interval /// /// Initializes a new instance of the class. /// - /// The pipeline to attach to. + /// The pipeline to add the component to. /// A lazy enumerator of data. /// The explicit start time of the data in the enumeration. Supply this parameter when the enumeration contains /// data values with absolute originating times (e.g. 
[value, time] pairs read from a file), and you want to propose a pipeline replay diff --git a/Sources/Runtime/Microsoft.Psi/Components/Join.cs b/Sources/Runtime/Microsoft.Psi/Components/Join.cs index 20c28c406..295fbedbd 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Join.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Join.cs @@ -18,7 +18,7 @@ public class Join : Fuse /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Reproducible interpolator to use when joining the streams. /// Mapping function from message pair to output. /// Number of secondary streams. diff --git a/Sources/Runtime/Microsoft.Psi/Components/Join{TPrimary,TSecondary,TOut}.cs b/Sources/Runtime/Microsoft.Psi/Components/Join{TPrimary,TSecondary,TOut}.cs index f90b33daf..836a6bd6b 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Join{TPrimary,TSecondary,TOut}.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Join{TPrimary,TSecondary,TOut}.cs @@ -12,19 +12,19 @@ namespace Microsoft.Psi.Components /// The type the messages on the primary stream. /// The type messages on the secondary stream. /// The type of output message. - public class Join : Join + public class Join : Fuse { /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Reproducible interpolator to use when joining the streams. /// Mapping function from message pair to output. /// Number of secondary streams. /// Selector function mapping primary messages to secondary stream indices. public Join( Pipeline pipeline, - ReproducibleInterpolator interpolator, + ReproducibleInterpolator interpolator, Func outputCreator, int secondaryCount = 1, Func> secondarySelector = null) diff --git a/Sources/Runtime/Microsoft.Psi/Components/Merge.cs b/Sources/Runtime/Microsoft.Psi/Components/Merge.cs index 3098c0ea4..89056fa6f 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Merge.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Merge.cs @@ -15,7 +15,7 @@ public class Merge : IProducer> /// /// Initializes a new instance of the class. /// - /// The pipeline to which to attach. + /// The pipeline to add the component to. public Merge(Pipeline pipeline) { this.pipeline = pipeline; diff --git a/Sources/Runtime/Microsoft.Psi/Components/Merger.cs b/Sources/Runtime/Microsoft.Psi/Components/Merger.cs index 899a73f35..8e083dc5a 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Merger.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Merger.cs @@ -21,7 +21,7 @@ public class Merger /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Action invoked for each key/message. public Merger(Pipeline pipeline, Action> action) { diff --git a/Sources/Runtime/Microsoft.Psi/Components/Pair.cs b/Sources/Runtime/Microsoft.Psi/Components/Pair.cs index 5c92f84f8..1df1bdb8c 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Pair.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Pair.cs @@ -20,7 +20,7 @@ public class Pair : IProducer /// /// Initializes a new instance of the class. /// - /// The pipeline to which to attach. + /// The pipeline to add the component to. /// Mapping function from primary/secondary stream values to output type. public Pair( Pipeline pipeline, @@ -36,7 +36,7 @@ public class Pair : IProducer /// /// Initializes a new instance of the class. 
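As a usage note for the Join component change above (Join<TPrimary, TSecondary, TOut> is now expressed directly over Fuse with a reproducible interpolator), joins are normally created through the Join operators rather than by instantiating the component. A small sketch, assuming the parameterless overload matches originating times exactly and the TimeSpan overload uses a nearest-within-window interpolator; stream names are illustrative.

var exact  = sensorA.Join(sensorB);                                  // (a, b) pairs with matching originating times
var nearby = sensorA.Join(sensorB, TimeSpan.FromMilliseconds(50));   // nearest match within a ±50 ms window
nearby.Do(pair => Console.WriteLine($"{pair.Item1:F2}, {pair.Item2:F2}"));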
/// - /// The pipeline to which to attach. + /// The pipeline to add the component to. /// Mapping function from primary/secondary stream values to output type. /// An initial secondary value to be used until the first message arrives on the secondary stream. public Pair( diff --git a/Sources/Runtime/Microsoft.Psi/Components/ParallelFixedLength.cs b/Sources/Runtime/Microsoft.Psi/Components/ParallelFixedLength.cs index 36f4a05c7..91bbafb0f 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/ParallelFixedLength.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/ParallelFixedLength.cs @@ -23,7 +23,7 @@ public class ParallelFixedLength : Subpipeline, IConsumer, IPr /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Vector size. /// Action to apply to output producers. /// Name for this component (defaults to ParallelFixedLength). @@ -48,7 +48,7 @@ public ParallelFixedLength(Pipeline pipeline, int vectorSize, Action /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Vector size. /// Function mapping keyed input producers to output producers. /// When true, a result is produced even if a message is dropped in processing one of the input elements. In this case the corresponding output element is set to a default value. diff --git a/Sources/Runtime/Microsoft.Psi/Components/ParallelSparseDo.cs b/Sources/Runtime/Microsoft.Psi/Components/ParallelSparseDo.cs index 168d96471..98d86de29 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/ParallelSparseDo.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/ParallelSparseDo.cs @@ -22,7 +22,7 @@ public class ParallelSparseDo : Subpipeline, IConsum /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// A function that generates a dictionary of key-value pairs for each given input message. /// Action to perform in parallel. /// Predicate function determining whether and when (originating time) to terminate branches (defaults to when key no longer present), given the current key, message payload (dictionary) and originating time. diff --git a/Sources/Runtime/Microsoft.Psi/Components/ParallelSparseSelect.cs b/Sources/Runtime/Microsoft.Psi/Components/ParallelSparseSelect.cs index 5228cfdc0..2d73d114a 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/ParallelSparseSelect.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/ParallelSparseSelect.cs @@ -26,12 +26,12 @@ public class ParallelSparseSelect private readonly Pipeline pipeline; private readonly Connector inConnector; private readonly Connector outConnector; - private readonly Join, TBranchOut, TBranchOut, TOut> join; + private readonly Join, TBranchOut, TOut> join; /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// A function that splits the input by generating a dictionary of key-value pairs for each given input message. /// Function mapping keyed input producers to output producers. /// When true, a result is produced even if a message is dropped in processing one of the input elements. In this case the corresponding output element is set to a default value. @@ -60,9 +60,9 @@ public class ParallelSparseSelect var interpolator = outputDefaultIfDropped ? 
Reproducible.ExactOrDefault(defaultValue) : Reproducible.Exact(); var buffer = new Dictionary(); - this.join = Operators.Join( - parallelSparseSplitter.ActiveBranches, - Enumerable.Empty>(), + + this.join = new Join, TBranchOut, TOut>( + parallelSparseSplitter.ActiveBranches.Pipeline, interpolator, (keys, values) => { @@ -73,7 +73,11 @@ public class ParallelSparseSelect } return outputCreator(buffer); - }); + }, + 0, + keys => keys.Select(p => p.Value)); + + parallelSparseSplitter.ActiveBranches.PipeTo(this.join.InPrimary); this.outConnector = this.CreateOutputConnectorTo(pipeline, nameof(this.outConnector)); this.join.PipeTo(this.outConnector); diff --git a/Sources/Runtime/Microsoft.Psi/Components/ParallelVariableLength.cs b/Sources/Runtime/Microsoft.Psi/Components/ParallelVariableLength.cs index 0fcc6c350..b141834a7 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/ParallelVariableLength.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/ParallelVariableLength.cs @@ -18,7 +18,7 @@ public class ParallelVariableLength : Subpipeline, IConsumer, private readonly Connector inConnector; private readonly Connector outConnector; private readonly Receiver splitter; - private readonly List> branches = new List>(); + private readonly List> branches = new (); private readonly Join join; private readonly Emitter activeBranchesEmitter; private readonly Func, IProducer> parallelTransform; @@ -27,7 +27,7 @@ public class ParallelVariableLength : Subpipeline, IConsumer, /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Function mapping keyed input producers to output producers. /// Name for this component (defaults to ParallelVariableLength). /// Pipeline-level default delivery policy to be used by this component (defaults to if unspecified). @@ -43,7 +43,7 @@ public ParallelVariableLength(Pipeline pipeline, Action> act /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Function mapping keyed input producers to output producers. /// When true, a result is produced even if a message is dropped in processing one of the input elements. In this case the corresponding output element is set to a default value. /// Default value to use when messages are dropped in processing one of the input elements. @@ -94,7 +94,7 @@ private void Receive(TIn[] message, Envelope e) { var branchResult = this.parallelTransform(i, connectorIn.Out); var connectorOut = new Connector(subpipeline, this, $"connectorOut{i}"); - branchResult.PipeTo(connectorOut.In, true); + branchResult.PipeTo(connectorOut, true); connectorOut.Out.PipeTo(this.join.AddInput(), true); } else diff --git a/Sources/Runtime/Microsoft.Psi/Components/Processor.cs b/Sources/Runtime/Microsoft.Psi/Components/Processor.cs index e7fbbbb4e..bf4e0a52a 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Processor.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Processor.cs @@ -18,17 +18,22 @@ namespace Microsoft.Psi.Components /// The result type. public class Processor : ConsumerProducer { - private Action> transform; + private readonly Action> transform; /// /// Initializes a new instance of the class. /// - /// The pipeline to attach to. + /// The pipeline to add the component to. /// A delegate that processes the input data and potentially publishes a result on the provided . 
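A hypothetical usage sketch for the parallel components above: an array-valued stream is split so that each element is processed on its own subpipeline branch, and the per-branch results are joined back into a vector. The Parallel operator overload shown (branch index plus per-branch stream) is assumed, and the input stream construction is illustrative only.

IProducer<int[]> arrays  = Generators.Sequence(pipeline, new[] { 1, 2, 3 }, a => a, 10, TimeSpan.FromMilliseconds(10));
IProducer<int[]> doubled = arrays.Parallel((index, branch) => branch.Select(x => x * 2));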
- public Processor(Pipeline pipeline, Action> transform) + /// An optional action to execute when the input stream closes. + public Processor(Pipeline pipeline, Action> transform, Action> onClose = null) : base(pipeline) { this.transform = transform; + if (onClose != null) + { + this.In.Unsubscribed += closingTime => onClose(closingTime, this.Out); + } } /// diff --git a/Sources/Runtime/Microsoft.Psi/Components/RelativeIndexWindow.cs b/Sources/Runtime/Microsoft.Psi/Components/RelativeIndexWindow.cs index 719e6ebda..d850e01b3 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/RelativeIndexWindow.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/RelativeIndexWindow.cs @@ -27,7 +27,7 @@ public class RelativeIndexWindow : ConsumerProducer /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// The relative index interval over which to gather messages. /// Select output message from collected window of input messages. public RelativeIndexWindow(Pipeline pipeline, IntInterval relativeIndexInterval, Func>, TOutput> selector) diff --git a/Sources/Runtime/Microsoft.Psi/Components/RelativeTimeWindow.cs b/Sources/Runtime/Microsoft.Psi/Components/RelativeTimeWindow.cs index 5272f3021..9e2dfd80d 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/RelativeTimeWindow.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/RelativeTimeWindow.cs @@ -25,7 +25,7 @@ public class RelativeTimeWindow : ConsumerProducer /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// The relative time interval over which to gather messages. /// Select output message from collected window of input messages. public RelativeTimeWindow(Pipeline pipeline, RelativeTimeInterval relativeTimeInterval, Func>, TOutput> selector) diff --git a/Sources/Runtime/Microsoft.Psi/Components/SerializerComponent.cs b/Sources/Runtime/Microsoft.Psi/Components/SerializerComponent.cs index 2b08deaaf..b7dd9a93f 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/SerializerComponent.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/SerializerComponent.cs @@ -19,7 +19,7 @@ internal sealed class SerializerComponent : ConsumerProducer, Mess /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Known serializers. internal SerializerComponent(Pipeline pipeline, KnownSerializers serializers) : base(pipeline) diff --git a/Sources/Runtime/Microsoft.Psi/Components/SimpleConsumer.cs b/Sources/Runtime/Microsoft.Psi/Components/SimpleConsumer.cs index cf0c7c5cd..89dfc5e08 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/SimpleConsumer.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/SimpleConsumer.cs @@ -12,7 +12,7 @@ public abstract class SimpleConsumer : IConsumer /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. public SimpleConsumer(Pipeline pipeline) { this.In = pipeline.CreateReceiver(this, this.Receive, nameof(this.In)); diff --git a/Sources/Runtime/Microsoft.Psi/Components/Splitter.cs b/Sources/Runtime/Microsoft.Psi/Components/Splitter.cs index f7b7a8c27..66bf2291f 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Splitter.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Splitter.cs @@ -21,7 +21,7 @@ public class Splitter : IConsumer /// /// Initializes a new instance of the class. 
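To illustrate the new optional onClose hook on Processor above: the callback receives the closing time of the input stream together with the output emitter, which can be used, for example, to flush or report state accumulated by the transform. A sketch with illustrative names (pipeline, source); anything posted from onClose must still respect the emitter's strictly increasing originating-time requirement.

var count = 0;
var counter = new Processor<double, int>(
    pipeline,
    (value, envelope, emitter) => emitter.Post(++count, envelope.OriginatingTime),
    onClose: (closingTime, emitter) => Console.WriteLine($"Input closed at {closingTime}; saw {count} messages."));
source.PipeTo(counter);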
/// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Selector function identifying the output. public Splitter(Pipeline pipeline, Func outputSelector) { diff --git a/Sources/Runtime/Microsoft.Psi/Components/Timer{TOut}.cs b/Sources/Runtime/Microsoft.Psi/Components/Timer{TOut}.cs index 4ee4d0b6d..8bcd31747 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Timer{TOut}.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Timer{TOut}.cs @@ -16,7 +16,7 @@ public class Timer : Timer, IProducer /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Time interval with which to produce messages. /// Message generation function. public Timer(Pipeline pipeline, uint timerInterval, Func generator) diff --git a/Sources/Runtime/Microsoft.Psi/Components/Zip.cs b/Sources/Runtime/Microsoft.Psi/Components/Zip.cs index 71a1bba61..0a36bd243 100644 --- a/Sources/Runtime/Microsoft.Psi/Components/Zip.cs +++ b/Sources/Runtime/Microsoft.Psi/Components/Zip.cs @@ -22,7 +22,7 @@ public class Zip : IProducer /// /// Initializes a new instance of the class. /// - /// The pipeline to which to attach. + /// The pipeline to add the component to. public Zip(Pipeline pipeline) { this.pipeline = pipeline; diff --git a/Sources/Runtime/Microsoft.Psi/Connectors/Connector.cs b/Sources/Runtime/Microsoft.Psi/Connectors/Connector.cs index 6d1eb52e1..159d7f50e 100644 --- a/Sources/Runtime/Microsoft.Psi/Connectors/Connector.cs +++ b/Sources/Runtime/Microsoft.Psi/Connectors/Connector.cs @@ -18,8 +18,8 @@ public sealed class Connector : IProducer, IConsumer, IConnector /// /// The source pipeline. /// The target pipeline. - /// The name of the connector. - /// Whether or not the source message envelopes should be preserved. + /// An optional name for the connector. + /// An optional parameter that specifies whether or not the source message envelopes should be preserved. public Connector(Pipeline from, Pipeline to, string name = null, bool preserveEnvelope = false) { this.name = name ?? $"{from.Name}→{to.Name}"; @@ -32,9 +32,9 @@ public Connector(Pipeline from, Pipeline to, string name = null, bool preserveEn /// /// Initializes a new instance of the class. /// - /// The pipeline to create the connector in. - /// The name of the connector. - /// Whether or not the source message envelopes should be preserved. + /// The pipeline to add the component to. + /// An optional name for the connector. + /// An optional parameter that specifies whether or not the source message envelopes should be preserved. public Connector(Pipeline pipeline, string name = null, bool preserveEnvelope = false) : this(pipeline, pipeline, name ?? $"Connector-{pipeline.Name}", preserveEnvelope) { @@ -51,9 +51,6 @@ public Connector(Pipeline pipeline, string name = null, bool preserveEnvelope = public Emitter Out { get; } /// - public override string ToString() - { - return this.name; - } + public override string ToString() => this.name; } } \ No newline at end of file diff --git a/Sources/Runtime/Microsoft.Psi/Connectors/IConnector.cs b/Sources/Runtime/Microsoft.Psi/Connectors/IConnector.cs index db050f438..893f29000 100644 --- a/Sources/Runtime/Microsoft.Psi/Connectors/IConnector.cs +++ b/Sources/Runtime/Microsoft.Psi/Connectors/IConnector.cs @@ -3,14 +3,10 @@ namespace Microsoft.Psi.Components { - using System; - using System.Collections.Generic; - using System.Text; - /// /// Defines a marker interface for connectors, e.g. 
entities used to bridge pipelines. /// - internal interface IConnector + public interface IConnector { } } diff --git a/Sources/Runtime/Microsoft.Psi/Data/Exporter.cs b/Sources/Runtime/Microsoft.Psi/Data/Exporter.cs index ec05a54fa..9ec75a5bc 100644 --- a/Sources/Runtime/Microsoft.Psi/Data/Exporter.cs +++ b/Sources/Runtime/Microsoft.Psi/Data/Exporter.cs @@ -27,7 +27,7 @@ public abstract class Exporter : Subpipeline, IDisposable private readonly PsiStoreWriter writer; private readonly Merger, string> merger; private readonly Pipeline pipeline; - private readonly ManualResetEvent throttle = new ManualResetEvent(true); + private readonly ManualResetEvent throttle = new (true); private readonly KnownSerializers serializers; /// @@ -76,6 +76,11 @@ protected internal Exporter(Pipeline pipeline, string name, string path, bool cr /// public string Path => this.writer.Path; + /// + /// Gets stream metadata. + /// + public IEnumerable Metadata => this.writer.Metadata; + /// /// Gets the set of types that this Importer can deserialize. /// Types can be added or re-mapped using the method. @@ -219,16 +224,16 @@ internal void Write(Emitter source, string name, PsiStreamMe this.WriteToStorage(source, name, metadata.IsIndexed, deliveryPolicy).UpdateSupplementalMetadataFrom(metadata); } - internal void Write(Emitter> source, PsiStreamMetadata meta, DeliveryPolicy> deliveryPolicy = null) + internal void Write(Emitter> source, PsiStreamMetadata metadata, DeliveryPolicy> deliveryPolicy = null) { - var mergeInput = this.merger.Add(meta.Name); // this checks for duplicates + var mergeInput = this.merger.Add(metadata.Name); // this checks for duplicates var connector = this.CreateInputConnectorFrom>(source.Pipeline, null); source.PipeTo(connector); - source.Name ??= meta.Name; - connector.Out.Name = meta.Name; + source.Name ??= metadata.Name; + connector.Out.Name = metadata.Name; - this.writer.OpenStream(meta); + this.writer.OpenStream(metadata); // defaults to lossless delivery policy unless otherwise specified connector.PipeTo(mergeInput, true, deliveryPolicy ?? DeliveryPolicy.Unlimited); diff --git a/Sources/Runtime/Microsoft.Psi/Data/Importer.cs b/Sources/Runtime/Microsoft.Psi/Data/Importer.cs index 4364aa188..73ea3d68c 100644 --- a/Sources/Runtime/Microsoft.Psi/Data/Importer.cs +++ b/Sources/Runtime/Microsoft.Psi/Data/Importer.cs @@ -209,7 +209,7 @@ private IProducer BridgeOut(IProducer stream, string name) private class StreamImporter : ISourceComponent, IDisposable { private readonly IStreamReader streamReader; - private readonly Dictionary streams = new Dictionary(); + private readonly Dictionary streams = new (); private readonly Pipeline pipeline; private readonly KeyValueStore configurationStore; private readonly Receiver loopBack; diff --git a/Sources/Runtime/Microsoft.Psi/Data/PsiStore.cs b/Sources/Runtime/Microsoft.Psi/Data/PsiStore.cs index e4cdb3bbe..d76599dac 100644 --- a/Sources/Runtime/Microsoft.Psi/Data/PsiStore.cs +++ b/Sources/Runtime/Microsoft.Psi/Data/PsiStore.cs @@ -23,7 +23,7 @@ public static class PsiStore /// Creates a new multi-stream \psi store and returns an instance /// which can be used to write streams to this store. /// - /// The that owns the . + /// The pipeline to add the component to. /// The name of the store to create. /// The path to use. If null, an in-memory store is created. /// Indicates whether to create a numbered subdirectory for each execution of the pipeline. 
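A usage sketch for the store-writing pieces above (PsiStore.Create and the Exporter, whose per-stream metadata is now surfaced through the new Metadata property): create a store, persist a generated stream, and run the pipeline. The path and stream name are illustrative; the sketch assumes using Microsoft.Psi.

using (var p = Pipeline.Create())
{
    var store = PsiStore.Create(p, "Demo", @"C:\Data\Demo");
    var ticks = Generators.Repeat(p, 0, 10, TimeSpan.FromSeconds(1));  // 10 messages, one per second
    ticks.Write("Ticks", store);
    p.Run();
}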
@@ -47,7 +47,7 @@ public static PsiExporter Create(Pipeline pipeline, string name, string rootPath /// which can be used to inspect the store and open the streams. /// The store metadata is available immediately after this call (before the pipeline is running) via the property. /// - /// The that owns the . + /// The pipeline to add the component to. /// The name of the store to open (the same as the catalog file name). /// /// The path to the store. @@ -465,7 +465,7 @@ public static bool TryGetStreamMetadata(IProducer source, out IStreamMetad /// /// Returns the metadata associated with the specified stream, if the stream is persisted to a \psi store. /// - /// The current pipeline. + /// The pipeline to add the component to. /// The name of the stream to retrieve metadata about. /// Upon return, this parameter contains the metadata associated with the stream, or null if the stream is not persisted. /// True if the stream is persisted to a store, false otherwise. diff --git a/Sources/Runtime/Microsoft.Psi/Data/PsiStoreStreamReader.cs b/Sources/Runtime/Microsoft.Psi/Data/PsiStoreStreamReader.cs index d4a8d1a7c..57893499d 100644 --- a/Sources/Runtime/Microsoft.Psi/Data/PsiStoreStreamReader.cs +++ b/Sources/Runtime/Microsoft.Psi/Data/PsiStoreStreamReader.cs @@ -17,10 +17,10 @@ namespace Microsoft.Psi.Data /// public sealed class PsiStoreStreamReader : IStreamReader { - private readonly Dictionary> targets = new Dictionary>(); - private readonly Dictionary>> errorHandlers = new Dictionary>>(); - private readonly Dictionary> outputs = new Dictionary>(); - private readonly Dictionary> indexOutputs = new Dictionary>(); + private readonly Dictionary> targets = new (); + private readonly Dictionary>> errorHandlers = new (); + private readonly Dictionary> outputs = new (); + private readonly Dictionary> indexOutputs = new (); private SerializationContext context; private byte[] buffer; @@ -113,7 +113,7 @@ public bool MoveNext(out Envelope envelope) /// public bool IsLive() { - return this.PsiStoreReader.IsMoreDataExpected(); + return PsiStoreMonitor.IsStoreLive(this.Name, this.Path); } /// @@ -178,7 +178,7 @@ public void ReadAll(ReplayDescriptor descriptor, CancellationToken cancelationTo { var result = true; this.PsiStoreReader.Seek(descriptor.Interval, true); - while (result || this.PsiStoreReader.IsMoreDataExpected()) + while (result || this.IsLive()) { if (cancelationToken.IsCancellationRequested) { diff --git a/Sources/Runtime/Microsoft.Psi/Diagnostics/DiagnosticsCollector.cs b/Sources/Runtime/Microsoft.Psi/Diagnostics/DiagnosticsCollector.cs index e4efb64b1..288f6b22b 100644 --- a/Sources/Runtime/Microsoft.Psi/Diagnostics/DiagnosticsCollector.cs +++ b/Sources/Runtime/Microsoft.Psi/Diagnostics/DiagnosticsCollector.cs @@ -84,7 +84,7 @@ public void PipelineDisposed(Pipeline pipeline) /// Element (representing component) created. /// /// Called upon element construction (first moment component becomes a pipeline element). - /// Pipeline to which the element belongs. + /// The pipeline to add the component to. /// Element being created. /// Component associated with this pipeline element. public void PipelineElementCreate(Pipeline pipeline, PipelineElement element, object component) @@ -118,7 +118,7 @@ public void PipelineElementCreate(Pipeline pipeline, PipelineElement element, ob /// Element (representing component) being started. /// /// Called after scheduling calls to start handler. - /// Pipeline to which the element belongs. + /// The pipeline to add the component to. 
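And the corresponding read side, relevant to the PsiStore.Open documentation and the PsiStoreStreamReader liveness change above (IsLive now consults PsiStoreMonitor, and ReadAll keeps reading while the store is live): open the store, subscribe to a stream, and replay it. Names mirror the writer sketch.

using (var p = Pipeline.Create())
{
    var store = PsiStore.Open(p, "Demo", @"C:\Data\Demo");
    var ticks = store.OpenStream<int>("Ticks");
    ticks.Do((value, envelope) => Console.WriteLine($"{envelope.OriginatingTime:o}: {value}"));
    p.Run(ReplayDescriptor.ReplayAll);
}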
/// Element being started. public void PipelineElementStart(Pipeline pipeline, PipelineElement element) { @@ -129,7 +129,7 @@ public void PipelineElementStart(Pipeline pipeline, PipelineElement element) /// Element (representing component) being stopped. /// /// Called after scheduling calls to stop handler. - /// Pipeline to which the element belongs. + /// The pipeline to add the component to. /// Element being stopped. public void PipelineElementStop(Pipeline pipeline, PipelineElement element) { @@ -140,7 +140,7 @@ public void PipelineElementStop(Pipeline pipeline, PipelineElement element) /// Element (representing component) being finalized. /// /// Called after scheduling calls to final handler. - /// Pipeline to which the element belongs. + /// The pipeline to add the component to. /// Element being finalized. public void PipelineElementFinal(Pipeline pipeline, PipelineElement element) { @@ -151,7 +151,7 @@ public void PipelineElementFinal(Pipeline pipeline, PipelineElement element) /// Element (representing component) created. /// /// Called upon element disposal. - /// Pipeline to which the element belongs. + /// The pipeline to add the component to. /// Element being created. public void PipelineElementDisposed(Pipeline pipeline, PipelineElement element) { @@ -162,7 +162,7 @@ public void PipelineElementDisposed(Pipeline pipeline, PipelineElement element) /// Output (emitter) added to element. /// /// Called just after element start (or dynamically if added once pipeline running). - /// Pipeline to which the element belongs. + /// The pipeline to add the component to. /// Element to which emitter is being added. /// Emitter being added. public void PipelineElementAddEmitter(Pipeline pipeline, PipelineElement element, IEmitter emitter) @@ -190,7 +190,7 @@ public void EmitterRenamed(IEmitter emitter) /// Input (receiver) added to element. /// /// Called just after element start (or dynamically if added once pipeline running). - /// Pipeline to which the element belongs. + /// The pipeline to add the component to. /// Element to which receiver is being added. /// Receiver being added. public void PipelineElementAddReceiver(Pipeline pipeline, PipelineElement element, IReceiver receiver) @@ -203,7 +203,7 @@ public void PipelineElementAddReceiver(Pipeline pipeline, PipelineElement elemen /// Input subscribed to input. /// /// Called just after element start (or dynamically if subscribed once pipeline running). - /// Pipeline to which the element belongs. + /// The pipeline to add the component to. /// Element to which receiver belongs. /// Receiver subscribing to emitter. /// Emitter to which receiver is subscribing. @@ -221,7 +221,7 @@ public void PipelineElementReceiverSubscribe(Pipeline pipeline, PipelineElement /// Input unsubscribed to input. /// /// Called upon unsubscribe (only if pipeline running). - /// Pipeline to which the element belongs. + /// The pipeline to add the component to. /// Element to which receiver belongs. /// Receiver unsubscribing to emitter. /// Emitter from which receiver is unsubscribing. @@ -234,7 +234,7 @@ public void PipelineElementReceiverUnsubscribe(Pipeline pipeline, PipelineElemen /// /// Get collector of diagnostics message flow statistics for a single receiver. /// - /// Pipeline to which the element belongs. + /// The pipeline to add the component to. /// Element to which receiver belongs. /// Receiver having completed processing. 
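For context on the DiagnosticsCollector parameter-doc changes above: diagnostics are normally switched on when the pipeline is created, and the resulting graph snapshots arrive on the pipeline's Diagnostics stream as the PipelineDiagnostics objects described below. A small sketch, assuming the enableDiagnostics flag on Pipeline.Create and the Diagnostics stream on Pipeline.

using (var p = Pipeline.Create(enableDiagnostics: true))
{
    Generators.Repeat(p, 0, 100, TimeSpan.FromMilliseconds(10));
    p.Diagnostics.Do(d => Console.WriteLine($"{d.Name}: {d.PipelineElements.Length} elements"));
    p.Run();
}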
public ReceiverCollector GetReceiverDiagnosticsCollector(Pipeline pipeline, PipelineElement element, IReceiver receiver) diff --git a/Sources/Runtime/Microsoft.Psi/Diagnostics/DiagnosticsSampler.cs b/Sources/Runtime/Microsoft.Psi/Diagnostics/DiagnosticsSampler.cs index d51d09925..40589f12b 100644 --- a/Sources/Runtime/Microsoft.Psi/Diagnostics/DiagnosticsSampler.cs +++ b/Sources/Runtime/Microsoft.Psi/Diagnostics/DiagnosticsSampler.cs @@ -20,7 +20,7 @@ internal class DiagnosticsSampler : ISourceComponent, IDisposable /// /// Initializes a new instance of the class. /// - /// Pipeline to which this component belongs. + /// The pipeline to add the component to. /// Diagnostics collector. /// Diagnostics configuration. public DiagnosticsSampler(Pipeline pipeline, DiagnosticsCollector collector, DiagnosticsConfiguration config) diff --git a/Sources/Runtime/Microsoft.Psi/Diagnostics/PipelineDiagnostics.cs b/Sources/Runtime/Microsoft.Psi/Diagnostics/PipelineDiagnostics.cs index ec4ab2cc8..325873527 100644 --- a/Sources/Runtime/Microsoft.Psi/Diagnostics/PipelineDiagnostics.cs +++ b/Sources/Runtime/Microsoft.Psi/Diagnostics/PipelineDiagnostics.cs @@ -17,6 +17,31 @@ namespace Microsoft.Psi.Diagnostics /// public class PipelineDiagnostics { + /// + /// Initializes a new instance of the class. + /// + /// Pipeline ID. + /// Pipeline name. + /// Whether the pipeline is running (after started, before stopped). + /// Parent pipeline of this pipeline (it any). + /// Subpipelines of this pipeline. + /// Elements in this pipeline. + public PipelineDiagnostics( + int id, + string name, + bool isPipelineRunning, + PipelineDiagnostics parentPipelineDiagnostics, + PipelineDiagnostics[] subpipelineDiagnostics, + PipelineElementDiagnostics[] pipelineElements) + { + this.Id = id; + this.Name = name; + this.IsPipelineRunning = isPipelineRunning; + this.ParentPipelineDiagnostics = parentPipelineDiagnostics; + this.SubpipelineDiagnostics = subpipelineDiagnostics ?? new PipelineDiagnostics[0]; + this.PipelineElements = pipelineElements ?? new PipelineElementDiagnostics[0]; + } + /// /// Initializes a new instance of the class. /// @@ -62,19 +87,19 @@ private PipelineDiagnostics(PipelineDiagnosticsInternal pipelineDiagnosticsInter public bool IsPipelineRunning { get; private set; } /// - /// Gets elements in this pipeline. + /// Gets or sets elements in this pipeline. /// - public PipelineElementDiagnostics[] PipelineElements { get; private set; } + public PipelineElementDiagnostics[] PipelineElements { get; set; } /// - /// Gets parent pipeline of this pipeline (it any). + /// Gets or sets parent pipeline of this pipeline (it any). /// - public PipelineDiagnostics ParentPipelineDiagnostics { get; private set; } + public PipelineDiagnostics ParentPipelineDiagnostics { get; set; } /// - /// Gets subpipelines of this pipeline. + /// Gets or sets subpipelines of this pipeline. /// - public PipelineDiagnostics[] SubpipelineDiagnostics { get; private set; } + public PipelineDiagnostics[] SubpipelineDiagnostics { get; set; } /// /// Gets ancestor pipeline diagnostics. @@ -141,6 +166,49 @@ private void Initialize(PipelineDiagnosticsInternal pipelineDiagnosticsInternal, /// public class PipelineElementDiagnostics { + /// + /// Initializes a new instance of the class. + /// + /// Pipeline element ID. + /// Pipeline element name. + /// Pipeline element type name. + /// Pipeline element kind. + /// Whether the pipeline element is running (after started, before stopped). + /// Whether the pipeline element is finalized. 
+ /// Diagnostic state for the pipeline element. + /// ID of pipeline to which this element belongs. + /// Pipeline element emitters. + /// Pipeline element receivers. + /// Pipeline which this element represents (e.g. Subpipeline). + /// Bridge to pipeline element in another pipeline (e.g. Connectors). + public PipelineElementDiagnostics( + int id, + string name, + string typeName, + PipelineElementKind kind, + bool isRunning, + bool finalized, + string diagnosticState, + int pipelineId, + EmitterDiagnostics[] emitters, + ReceiverDiagnostics[] receivers, + PipelineDiagnostics representsSubpipeline, + PipelineElementDiagnostics connectorBridgeToPipelineElement) + { + this.Id = id; + this.Name = name; + this.TypeName = typeName; + this.Kind = kind; + this.IsRunning = isRunning; + this.Finalized = finalized; + this.DiagnosticState = diagnosticState; + this.PipelineId = pipelineId; + this.Emitters = emitters; + this.Receivers = receivers; + this.RepresentsSubpipeline = representsSubpipeline; + this.ConnectorBridgeToPipelineElement = connectorBridgeToPipelineElement; + } + /// /// Initializes a new instance of the class. /// @@ -217,30 +285,30 @@ internal PipelineElementDiagnostics(PipelineDiagnosticsInternal.PipelineElementD public string DiagnosticState { get; } /// - /// Gets pipeline element emitters. + /// Gets ID of pipeline to which this element belongs. /// - public EmitterDiagnostics[] Emitters { get; } + public int PipelineId { get; } /// - /// Gets pipeline element receivers. + /// Gets or sets pipeline element emitters. /// - public ReceiverDiagnostics[] Receivers { get; } + public EmitterDiagnostics[] Emitters { get; set; } /// - /// Gets ID of pipeline to which this element belongs. + /// Gets or sets pipeline element receivers. /// - public int PipelineId { get; } + public ReceiverDiagnostics[] Receivers { get; set; } /// - /// Gets pipeline which this element represents (e.g. Subpipeline). + /// Gets or sets pipeline which this element represents (e.g. Subpipeline). /// /// This is used when a pipeline element is a pipeline (e.g. Subpipeline). - public PipelineDiagnostics RepresentsSubpipeline { get; private set; } + public PipelineDiagnostics RepresentsSubpipeline { get; set; } /// - /// Gets bridge to pipeline element in another pipeline (e.g. Connectors). + /// Gets or sets bridge to pipeline element in another pipeline (e.g. Connectors). /// - public PipelineElementDiagnostics ConnectorBridgeToPipelineElement { get; private set; } + public PipelineElementDiagnostics ConnectorBridgeToPipelineElement { get; set; } } /// @@ -248,6 +316,28 @@ internal PipelineElementDiagnostics(PipelineDiagnosticsInternal.PipelineElementD /// public class EmitterDiagnostics { + /// + /// Initializes a new instance of the class. + /// + /// Emitter ID. + /// Emitter name. + /// Emitter type. + /// Pipeline element to which emitter belongs. + /// Emitter target receivers. + public EmitterDiagnostics( + int id, + string name, + string type, + PipelineElementDiagnostics pipelineElement, + ReceiverDiagnostics[] targets) + { + this.Id = id; + this.Name = name; + this.Type = type; + this.PipelineElement = pipelineElement; + this.Targets = targets; + } + /// /// Initializes a new instance of the class. /// @@ -284,14 +374,14 @@ internal EmitterDiagnostics(PipelineDiagnosticsInternal.EmitterDiagnostics emitt public string Type { get; } /// - /// Gets pipeline element to which emitter belongs. + /// Gets or sets pipeline element to which emitter belongs. 
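The constructors added above make it possible to assemble a diagnostics graph outside the collector, for example to synthesize or trim a snapshot; null subpipeline/element arrays are normalized to empty arrays, and the cross-referencing properties are now settable so the graph can be wired up after construction. A minimal sketch (PipelineElementDiagnostics is assumed to be nested in PipelineDiagnostics):

var snapshot = new PipelineDiagnostics(
    id: 0,
    name: "MyPipeline",
    isPipelineRunning: true,
    parentPipelineDiagnostics: null,
    subpipelineDiagnostics: null,   // normalized to an empty array
    pipelineElements: null);        // normalized to an empty array

// Cross-references can now be assigned after construction.
snapshot.PipelineElements = new PipelineDiagnostics.PipelineElementDiagnostics[0];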
/// - public PipelineElementDiagnostics PipelineElement { get; private set; } + public PipelineElementDiagnostics PipelineElement { get; set; } /// - /// Gets emitter target receivers. + /// Gets or sets emitter target receivers. /// - public ReceiverDiagnostics[] Targets { get; private set; } + public ReceiverDiagnostics[] Targets { get; set; } } /// @@ -299,6 +389,88 @@ internal EmitterDiagnostics(PipelineDiagnosticsInternal.EmitterDiagnostics emitt /// public class ReceiverDiagnostics { + /// + /// Initializes a new instance of the class. + /// + /// Receiver ID. + /// Receiver name. + /// Name of delivery policy used by receiver. + /// Receiver type name. + /// Whether receiver is throttled. + /// Delivery queue size at last message. + /// Average delivery queue size. + /// Total count of emitted messages. + /// Count of emitted messages in last averaging time window. + /// Total count of processed messages. + /// Count of processed messages in last averaging time window. + /// Total count of dropped messages. + /// Count of dropped messages in last averaging time window. + /// Latency with which the last message was created. + /// Average message created latency in last averaging time window. + /// Latency with which the last message was emitted. + /// Average message emitted latency in last averaging time window. + /// Latency with which the last message was received. + /// Average message received latency in last averaging time window. + /// Receiver processing time for the last message. + /// Average receiver processing time in last averaging time window. + /// Message size for the last message. + /// Average message size over in last averaging time window. + /// Pipeline element to which emitter belongs. + /// Receiver's source emitter. + public ReceiverDiagnostics( + int id, + string receiverName, + string deliveryPolicyName, + string typeName, + bool receiverIsThrottled, + double lastDeliveryQueueSize, + double avgDeliveryQueueSize, + int totalMessageEmittedCount, + int windowMessageEmittedCount, + int totalMessageProcessedCount, + int windowMessageProcessedCount, + int totalMessageDroppedCount, + int windowMessageDroppedCount, + double lastMessageCreatedLatency, + double avgMessageCreatedLatency, + double lastMessageEmittedLatency, + double avgMessageEmittedLatency, + double lastMessageReceivedLatency, + double avgMessageReceivedLatency, + double lastMessageProcessTime, + double avgMessageProcessTime, + double lastMessageSize, + double avgMessageSize, + PipelineElementDiagnostics pipelineElement, + EmitterDiagnostics source) + { + this.Id = id; + this.ReceiverName = receiverName; + this.DeliveryPolicyName = deliveryPolicyName; + this.TypeName = typeName; + this.ReceiverIsThrottled = receiverIsThrottled; + this.LastDeliveryQueueSize = lastDeliveryQueueSize; + this.AvgDeliveryQueueSize = avgDeliveryQueueSize; + this.TotalMessageEmittedCount = totalMessageEmittedCount; + this.WindowMessageEmittedCount = windowMessageEmittedCount; + this.TotalMessageProcessedCount = totalMessageProcessedCount; + this.WindowMessageProcessedCount = windowMessageProcessedCount; + this.TotalMessageDroppedCount = totalMessageDroppedCount; + this.WindowMessageDroppedCount = windowMessageDroppedCount; + this.LastMessageCreatedLatency = lastMessageCreatedLatency; + this.AvgMessageCreatedLatency = avgMessageCreatedLatency; + this.LastMessageEmittedLatency = lastMessageEmittedLatency; + this.AvgMessageEmittedLatency = avgMessageEmittedLatency; + this.LastMessageReceivedLatency = lastMessageReceivedLatency; + 
this.AvgMessageReceivedLatency = avgMessageReceivedLatency; + this.LastMessageProcessTime = lastMessageProcessTime; + this.AvgMessageProcessTime = avgMessageProcessTime; + this.LastMessageSize = lastMessageSize; + this.AvgMessageSize = avgMessageSize; + this.PipelineElement = pipelineElement; + this.Source = source; + } + /// /// Initializes a new instance of the class. /// @@ -364,7 +536,7 @@ internal ReceiverDiagnostics(PipelineDiagnosticsInternal.ReceiverDiagnostics rec public string DeliveryPolicyName { get; } /// - /// Gets receiver type. + /// Gets receiver type name. /// public string TypeName { get; } @@ -464,14 +636,14 @@ internal ReceiverDiagnostics(PipelineDiagnosticsInternal.ReceiverDiagnostics rec public double AvgMessageSize { get; } /// - /// Gets pipeline element to which emitter belongs. + /// Gets or sets pipeline element to which emitter belongs. /// - public PipelineElementDiagnostics PipelineElement { get; private set; } + public PipelineElementDiagnostics PipelineElement { get; set; } /// - /// Gets receiver's source emitter. + /// Gets or sets receiver's source emitter. /// - public EmitterDiagnostics Source { get; private set; } + public EmitterDiagnostics Source { get; set; } } /// diff --git a/Sources/Runtime/Microsoft.Psi/Executive/Pipeline.cs b/Sources/Runtime/Microsoft.Psi/Executive/Pipeline.cs index f89164d36..2432512e6 100644 --- a/Sources/Runtime/Microsoft.Psi/Executive/Pipeline.cs +++ b/Sources/Runtime/Microsoft.Psi/Executive/Pipeline.cs @@ -234,6 +234,11 @@ private set /// public TimeSpan ProgressReportInterval { get; set; } = TimeSpan.FromMilliseconds(500); + /// + /// Gets or sets virtual time offset. + /// + internal virtual TimeSpan VirtualTimeOffset { get; set; } = TimeSpan.Zero; + internal bool IsInitial => this.state == State.Initial; internal bool IsStarting => this.state == State.Starting; @@ -907,8 +912,8 @@ protected virtual IDisposable RunAsync(ReplayDescriptor descriptor, Clock clock, // this is the main pipeline (subpipelines inherit the parent clock) clock = this.replayDescriptor.Interval.Left != DateTime.MinValue ? - new Clock(this.replayDescriptor.Start) : - new Clock(default(TimeSpan)); + new Clock(this.replayDescriptor.Start + this.VirtualTimeOffset) : + new Clock(this.VirtualTimeOffset); // start the scheduler this.scheduler.Start(clock, this.replayDescriptor.EnforceReplayClock); diff --git a/Sources/Runtime/Microsoft.Psi/Executive/Subpipeline.cs b/Sources/Runtime/Microsoft.Psi/Executive/Subpipeline.cs index 2ec6addd4..aaa5c0a92 100644 --- a/Sources/Runtime/Microsoft.Psi/Executive/Subpipeline.cs +++ b/Sources/Runtime/Microsoft.Psi/Executive/Subpipeline.cs @@ -43,6 +43,22 @@ public Subpipeline(Pipeline parent, string name = null, DeliveryPolicy defaultDe this.parentPipeline.GetOrCreateNode(this); } + /// + /// Gets or sets virtual time offset (delegated to ancestors). + /// + internal override TimeSpan VirtualTimeOffset + { + get + { + return this.ParentPipeline.VirtualTimeOffset; + } + + set + { + this.ParentPipeline.VirtualTimeOffset = value; + } + } + /// /// Gets the parent pipeline. /// diff --git a/Sources/Runtime/Microsoft.Psi/Operators/Connectors.cs b/Sources/Runtime/Microsoft.Psi/Operators/Connectors.cs index 034dc2159..6f1c3a1cb 100644 --- a/Sources/Runtime/Microsoft.Psi/Operators/Connectors.cs +++ b/Sources/Runtime/Microsoft.Psi/Operators/Connectors.cs @@ -11,19 +11,17 @@ namespace Microsoft.Psi public static partial class Operators { /// - /// Connects a stream producer to a stream consumer. 
As a result, all messages in the stream will be routed to the consumer for processing. + /// Connects a stream producer to a stream consumer. /// /// The type of messages in the stream. - /// The type of consumer. + /// The type of the consumer. /// The source stream to subscribe to. /// The consumer (subscriber). /// An optional delivery policy. - /// Consumer (subscriber). - public static TC PipeTo(this IProducer source, TC consumer, DeliveryPolicy deliveryPolicy = null) - where TC : IConsumer - { - return PipeTo(source, consumer, false, deliveryPolicy); - } + /// The consumer that was passed as input. + public static TConsumer PipeTo(this IProducer source, TConsumer consumer, DeliveryPolicy deliveryPolicy = null) + where TConsumer : IConsumer => + PipeTo(source, consumer, false, deliveryPolicy); /// /// Creates a connector that exposes the messages it receives as a stream rather than calling a delegate. @@ -33,10 +31,8 @@ public static partial class Operators /// The pipeline. /// The name of this connector. /// The newly created connector. - public static Connector CreateConnector(this Pipeline p, string name) - { - return new Connector(p, name); - } + public static Connector CreateConnector(this Pipeline p, string name) => + new (p, name); /// /// Creates a stream in a specified target pipeline, based on a given input stream (that may belong in a different pipeline). @@ -61,20 +57,20 @@ public static IProducer BridgeTo(this IProducer input, Pipeline targetP } /// - /// Connects a stream producer to a stream consumer. As a result, all messages in the stream will be routed to the consumer for processing. + /// Connects a stream producer to a stream consumer. /// /// /// This is an internal-only method which provides the option to allow connections between producers and consumers in running pipelines. /// /// The type of messages in the stream. - /// The type of consumer. + /// The type of the consumer. /// The source stream to subscribe to. /// The consumer (subscriber). /// An optional flag to allow connections in running pipelines. /// An optional delivery policy. - /// Consumer. - internal static TC PipeTo(this IProducer source, TC consumer, bool allowWhileRunning, DeliveryPolicy deliveryPolicy = null) - where TC : IConsumer + /// The consumer that was passed as input. + internal static TConsumer PipeTo(this IProducer source, TConsumer consumer, bool allowWhileRunning, DeliveryPolicy deliveryPolicy = null) + where TConsumer : IConsumer { source.Out.Subscribe(consumer.In, allowWhileRunning, deliveryPolicy ?? source.Out.Pipeline.GetDefaultDeliveryPolicy()); return consumer; diff --git a/Sources/Runtime/Microsoft.Psi/Operators/Fuses.cs b/Sources/Runtime/Microsoft.Psi/Operators/Fuses.cs index 6f2f4c804..303898caf 100644 --- a/Sources/Runtime/Microsoft.Psi/Operators/Fuses.cs +++ b/Sources/Runtime/Microsoft.Psi/Operators/Fuses.cs @@ -385,6 +385,66 @@ public static partial class Operators #region Vector fuse operators + /// + /// Fuses a primary stream with an enumeration of secondary streams based on a specified interpolator. + /// + /// Type of primary stream messages. + /// Type of secondary stream messages. + /// Type of output stream messages. + /// Primary stream. + /// Enumeration of secondary streams. + /// Interpolator to use when fusing the streams. + /// Mapping function from primary and secondary messages to output. + /// An optional delivery policy for the primary stream. + /// An optional delivery policy for the secondary stream(s). + /// Output stream. 
+ public static IProducer Fuse( + this IProducer primary, + IEnumerable> secondaries, + Interpolator interpolator, + Func outputCreator, + DeliveryPolicy primaryDeliveryPolicy = null, + DeliveryPolicy secondariesDeliveryPolicy = null) + { + var fuse = new Fuse( + primary.Out.Pipeline, + interpolator, + outputCreator, + secondaries.Count(), + null); + + primary.PipeTo(fuse.InPrimary, primaryDeliveryPolicy); + + var i = 0; + foreach (var input in secondaries) + { + input.PipeTo(fuse.InSecondaries[i++], secondariesDeliveryPolicy); + } + + return fuse; + } + + /// + /// Fuses a primary stream with an enumeration of secondary streams based on a specified interpolator. + /// + /// Type of primary stream messages. + /// Type of secondary stream messages. + /// Primary stream. + /// Enumeration of secondary streams. + /// Interpolator to use when fusing the streams. + /// An optional delivery policy for the primary stream. + /// An optional delivery policy for the secondary stream(s). + /// Output stream. + public static IProducer<(TPrimary, TSecondary[])> Fuse( + this IProducer primary, + IEnumerable> secondaries, + Interpolator interpolator, + DeliveryPolicy primaryDeliveryPolicy = null, + DeliveryPolicy secondariesDeliveryPolicy = null) + { + return primary.Fuse(secondaries, interpolator, ValueTuple.Create, primaryDeliveryPolicy, secondariesDeliveryPolicy); + } + /// /// Fuses a primary stream with an enumeration of secondary streams based on a specified interpolator. /// @@ -426,30 +486,59 @@ public static partial class Operators } /// - /// Fuses an enumeration of streams into a vector stream, based on a specified interpolator. + /// Fuses a primary stream with an enumeration of secondary streams based on a specified interpolator. + /// + /// Type of primary stream messages. + /// Type of secondary stream messages. + /// Type of the interpolation result. + /// Primary stream. + /// Enumeration of secondary streams. + /// Interpolator to use when fusing the streams. + /// An optional delivery policy for the primary stream. + /// An optional delivery policy for the secondary stream(s). + /// Output stream. + public static IProducer<(TPrimary, TInterpolation[])> Fuse( + this IProducer primary, + IEnumerable> secondaries, + Interpolator interpolator, + DeliveryPolicy primaryDeliveryPolicy = null, + DeliveryPolicy secondariesDeliveryPolicy = null) + { + return primary.Fuse(secondaries, interpolator, (p, i) => (p, i), primaryDeliveryPolicy, secondariesDeliveryPolicy); + } + + /// + /// Fuses an enumeration of streams into a vector stream, based on a specified interpolator and output creator function. /// /// Type of input stream messages. + /// The type of output stream messages. /// Collection of input streams. /// Interpolator to use when fusing the streams. + /// Mapping function from input to output messages. /// An optional delivery policy to use for the streams. /// Output stream. 
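A usage sketch for the new vector Fuse overloads above: fuse a primary stream with an enumeration of secondary streams into (primary, secondaries[]) tuples, here using the Reproducible.Exact interpolator that also appears elsewhere in this patch. Stream names are illustrative.

IProducer<(double, double[])> fused = primary.Fuse(
    new[] { sensor1, sensor2, sensor3 },
    Reproducible.Exact<double>());

fused.Do(m => Console.WriteLine($"{m.Item1}: [{string.Join(", ", m.Item2)}]"));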
- public static IProducer Fuse( + public static IProducer Fuse( this IEnumerable> inputs, - Interpolator interpolator, + Interpolator interpolator, + Func outputCreator, DeliveryPolicy deliveryPolicy = null) { var count = inputs.Count(); if (count > 1) { - var buffer = new TIn[count]; + var buffer = new TOut[count]; return Fuse( inputs.First(), inputs.Skip(1), interpolator, (m, secondaryArray) => { - buffer[0] = m; - Array.Copy(secondaryArray, 0, buffer, 1, count - 1); + buffer[0] = outputCreator(m); + for (int i = 1; i < count; i++) + { + buffer[i] = outputCreator(secondaryArray[i - 1]); + } + return buffer; }, deliveryPolicy, @@ -457,7 +546,7 @@ public static partial class Operators } else if (count == 1) { - return inputs.First().Select(x => new[] { x }, deliveryPolicy); + return inputs.First().Select(x => new[] { outputCreator(x) }, deliveryPolicy); } else { @@ -465,6 +554,22 @@ public static partial class Operators } } + /// + /// Fuses an enumeration of streams into a vector stream, based on a specified interpolator. + /// + /// Type of input stream messages. + /// Collection of input streams. + /// Interpolator to use when fusing the streams. + /// An optional delivery policy to use for the streams. + /// Output stream. + public static IProducer Fuse( + this IEnumerable> inputs, + Interpolator interpolator, + DeliveryPolicy deliveryPolicy = null) + { + return inputs.Fuse(interpolator, _ => _, deliveryPolicy); + } + #endregion Vector fuse operators } } \ No newline at end of file diff --git a/Sources/Runtime/Microsoft.Psi/Operators/Generators.cs b/Sources/Runtime/Microsoft.Psi/Operators/Generators.cs index 0e0edb0c9..49d3cbcd7 100644 --- a/Sources/Runtime/Microsoft.Psi/Operators/Generators.cs +++ b/Sources/Runtime/Microsoft.Psi/Operators/Generators.cs @@ -17,7 +17,7 @@ public static class Generators /// Generates a finite stream of values published at a regular interval from a user-provided function. /// /// The type of data in the sequence. - /// The pipeline that will run this generator. + /// The pipeline to add the component to. /// The initial value. /// The function that generates a new value based on the previous value. /// The number of messages to publish. @@ -37,7 +37,7 @@ public static IProducer Sequence(Pipeline pipeline, T initialValue, Func /// The type of data in the sequence. - /// The pipeline that will run this generator. + /// The pipeline to add the component to. /// The initial value. /// The function that generates a new value based on the previous value. /// The desired time interval between consecutive messages. Defaults to 1 tick. @@ -55,7 +55,7 @@ public static IProducer Sequence(Pipeline pipeline, T initialValue, Func /// The type of data in the sequence. - /// The pipeline that will run this generator. + /// The pipeline to add the component to. /// The sequence to publish. /// The desired time interval between consecutive messages. Defaults to 1 tick. /// If non-null, this parameter specifies a time to align the generator messages with. If the parameter @@ -73,7 +73,7 @@ public static IProducer Sequence(Pipeline pipeline, IEnumerable enumera /// Generates a stream of values from a specified enumerable that provides the values and corresponding originating times. /// /// The type of data in the sequence. - /// The pipeline that will run this generator. + /// The pipeline to add the component to. /// An enumerable sequence of (data, originating time) pairs. /// The explicit start time of the data in the enumeration. 
Supply this parameter when the enumeration contains /// data values with absolute originating times (e.g. [value, time] pairs read from a file), and you want to propose a pipeline replay @@ -91,7 +91,7 @@ public static IProducer Sequence(Pipeline pipeline, IEnumerable<(T, DateTi /// Generates stream containing a single message, and keeps the stream open afterwards. /// /// The type of value to publish. - /// The pipeline to attach to. + /// The pipeline to add the component to. /// The value to publish. /// A stream of values of type T. /// The generated stream stays open until the pipeline is shut down. @@ -104,7 +104,7 @@ public static IProducer Once(Pipeline pipeline, T value) /// Generates stream containing a single message, and closes the stream afterwards. /// /// The type of value to publish. - /// The pipeline to attach to. + /// The pipeline to add the component to. /// The value to publish. /// A stream containing one value of type T. /// The generated stream closes after the message is published. @@ -117,7 +117,7 @@ public static IProducer Return(Pipeline pipeline, T value) /// Generates a finite stream of constant values published at a regular interval. /// /// The type of data in the sequence. - /// The pipeline that will run this generator. + /// The pipeline to add the component to. /// The value to publish. /// The number of messages to publish. /// The desired time interval between consecutive messages. Defaults to 1 tick. @@ -136,7 +136,7 @@ public static IProducer Repeat(Pipeline pipeline, T value, int count, Time /// Generates an infinite stream of constant values published at a regular interval. /// /// The type of data in the sequence. - /// The pipeline that will run this generator. + /// The pipeline to add the component to. /// The value to publish. /// The desired time interval between consecutive messages. Defaults to 1 tick. /// If non-null, this parameter specifies a time to align the generator messages with. If the parameter @@ -152,7 +152,7 @@ public static IProducer Repeat(Pipeline pipeline, T value, TimeSpan interv /// /// Generates a stream of a finite range of integer values published at a regular interval. /// - /// The pipeline that will run this generator. + /// The pipeline to add the component to. /// The starting value. /// The number of messages to publish. /// The desired time interval between consecutive messages. Defaults to 1 tick. diff --git a/Sources/Runtime/Microsoft.Psi/Operators/Joins.cs b/Sources/Runtime/Microsoft.Psi/Operators/Joins.cs index 5c2179c02..07b562618 100644 --- a/Sources/Runtime/Microsoft.Psi/Operators/Joins.cs +++ b/Sources/Runtime/Microsoft.Psi/Operators/Joins.cs @@ -1362,7 +1362,6 @@ public static partial class Operators /// /// Type of primary stream messages. /// Type of secondary stream messages. - /// Type of the interpolation result. /// Type of output stream messages. /// Primary stream. /// Enumeration of secondary streams. @@ -1371,15 +1370,15 @@ public static partial class Operators /// An optional delivery policy for the primary stream. /// An optional delivery policy for the secondary stream(s). /// Output stream. 
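A few usage sketches for the generator operators documented above (finite function-driven sequences, single-message streams, and integer ranges); p is an illustrative pipeline variable.

var ramp  = Generators.Sequence(p, 0, x => x + 1, 100, TimeSpan.FromMilliseconds(10));  // 0, 1, 2, ... (100 messages)
var hello = Generators.Once(p, "hello");                                                // one message, stream stays open
var range = Generators.Range(p, 0, 50, TimeSpan.FromMilliseconds(20));                  // integers 0..49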
- public static IProducer Join( + public static IProducer Join( this IProducer primary, IEnumerable> secondaries, - ReproducibleInterpolator interpolator, - Func outputCreator, + ReproducibleInterpolator interpolator, + Func outputCreator, DeliveryPolicy primaryDeliveryPolicy = null, DeliveryPolicy secondariesDeliveryPolicy = null) { - var join = new Join( + var join = new Join( primary.Out.Pipeline, interpolator, outputCreator, @@ -1398,162 +1397,151 @@ public static partial class Operators } /// - /// Joins an enumeration of streams into a vector stream, based on a specified reproducible interpolator. + /// Joins a primary stream with an enumeration of secondary streams based on a specified reproducible interpolator. /// - /// Type of input stream messages. - /// Collection of input streams. + /// Type of primary stream messages. + /// Type of secondary stream messages. + /// Primary stream. + /// Enumeration of secondary streams. /// Reproducible interpolator to use when joining the streams. - /// An optional delivery policy to use for the streams. + /// An optional delivery policy for the primary stream. + /// An optional delivery policy for the secondary stream(s). /// Output stream. - public static IProducer Join( - this IEnumerable> inputs, - ReproducibleInterpolator interpolator, - DeliveryPolicy deliveryPolicy = null) + public static IProducer<(TPrimary, TSecondary[])> Join( + this IProducer primary, + IEnumerable> secondaries, + ReproducibleInterpolator interpolator, + DeliveryPolicy primaryDeliveryPolicy = null, + DeliveryPolicy secondariesDeliveryPolicy = null) { - var count = inputs.Count(); - if (count > 1) - { - var buffer = new TIn[count]; - return Join( - inputs.First(), - inputs.Skip(1), - interpolator, - (m, secondaryArray) => - { - buffer[0] = m; - Array.Copy(secondaryArray, 0, buffer, 1, count - 1); - return buffer; - }, - deliveryPolicy, - deliveryPolicy); - } - else if (count == 1) - { - return inputs.First().Select(x => new[] { x }, deliveryPolicy); - } - else - { - throw new ArgumentException("Vector join with empty inputs collection."); - } + return primary.Join(secondaries, interpolator, ValueTuple.Create, primaryDeliveryPolicy, secondariesDeliveryPolicy); } /// - /// Joins a primary stream of integers with an enumeration of secondary streams based on a specified reproducible interpolator. + /// Joins a primary stream with an enumeration of secondary streams based on a specified reproducible interpolator. /// - /// Type of input messages. + /// Type of primary stream messages. + /// Type of secondary stream messages. /// Type of the interpolation result. + /// Type of output stream messages. /// Primary stream. - /// Collection of secondary streams. + /// Enumeration of secondary streams. /// Reproducible interpolator to use when joining the streams. + /// Mapping function from primary and secondary messages to output. /// An optional delivery policy for the primary stream. - /// An optional delivery policy for the secondary stream(s). + /// An optional delivery policy for the secondary stream(s). /// Output stream. 
- public static IProducer Join( - this IProducer primary, - IEnumerable> inputs, - ReproducibleInterpolator interpolator, - DeliveryPolicy primaryDeliveryPolicy = null, - DeliveryPolicy secondaryDeliveryPolicy = null) + public static IProducer Join( + this IProducer primary, + IEnumerable> secondaries, + ReproducibleInterpolator interpolator, + Func outputCreator, + DeliveryPolicy primaryDeliveryPolicy = null, + DeliveryPolicy secondariesDeliveryPolicy = null) { - var join = new Join( + var join = new Join( primary.Out.Pipeline, interpolator, - (count, values) => values, - inputs.Count(), - count => Enumerable.Range(0, count)); + outputCreator, + secondaries.Count(), + null); primary.PipeTo(join.InPrimary, primaryDeliveryPolicy); var i = 0; - foreach (var input in inputs) + foreach (var input in secondaries) { - input.PipeTo(join.InSecondaries[i++], secondaryDeliveryPolicy); + input.PipeTo(join.InSecondaries[i++], secondariesDeliveryPolicy); } return join; } - #endregion Vector joins - - #region Sparse vector (dictionary) joins - /// - /// Sparse vector join. + /// Joins a primary stream with an enumeration of secondary streams based on a specified reproducible interpolator. /// - /// Type of input messages. - /// Type of key values. + /// Type of primary stream messages. + /// Type of secondary stream messages. /// Type of the interpolation result. - /// The type of the output. /// Primary stream. - /// Collection of secondary streams. + /// Enumeration of secondary streams. /// Reproducible interpolator to use when joining the streams. - /// The output creator function. /// An optional delivery policy for the primary stream. - /// An optional delivery policy for the secondary stream(s). + /// An optional delivery policy for the secondary stream(s). /// Output stream. - public static Join, TIn, TInterpolation, TOutput> Join( - this IProducer> primary, - IEnumerable> inputs, - ReproducibleInterpolator interpolator, - Func, TInterpolation[], TOutput> outputCreator, - DeliveryPolicy> primaryDeliveryPolicy = null, - DeliveryPolicy secondaryDeliveryPolicy = null) + public static IProducer<(TPrimary, TInterpolation[])> Join( + this IProducer primary, + IEnumerable> secondaries, + ReproducibleInterpolator interpolator, + DeliveryPolicy primaryDeliveryPolicy = null, + DeliveryPolicy secondariesDeliveryPolicy = null) { - var join = new Join, TIn, TInterpolation, TOutput>( - primary.Out.Pipeline, - interpolator, - outputCreator, - inputs.Count(), - keys => keys.Select(p => p.Value)); + return primary.Join(secondaries, interpolator, (p, i) => (p, i), primaryDeliveryPolicy, secondariesDeliveryPolicy); + } - primary.PipeTo(join.InPrimary, primaryDeliveryPolicy); + /// + /// Joins an enumeration of streams into a vector stream, based on a specified reproducible interpolator and output creator function. + /// + /// Type of input stream messages. + /// The type of output stream messages. + /// Collection of input streams. + /// Reproducible interpolator to use when joining the streams. + /// Mapping function from input to output messages. + /// An optional delivery policy to use for the streams. + /// Output stream. 
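Note: the reworked overloads above join a primary stream with an enumeration of secondary streams. A sketch of the tuple-producing form, assuming Reproducible.Nearest<double>() (from the runtime's interpolator set, not shown in this diff) as the reproducible interpolator; stream contents are illustrative only:

```csharp
using System;
using System.Linq;
using Microsoft.Psi;

class TupleJoinSketch
{
    static void Main()
    {
        using var pipeline = Pipeline.Create();

        var primary = Generators.Repeat(pipeline, 0.0, 10, TimeSpan.FromMilliseconds(100));
        var secondaries = Enumerable.Range(1, 3)
            .Select(i => Generators.Repeat(pipeline, (double)i, 10, TimeSpan.FromMilliseconds(100)))
            .ToArray();

        // Produces IProducer<(double, double[])>: each primary message paired with
        // one interpolated value per secondary stream.
        primary.Join(secondaries, Reproducible.Nearest<double>())
               .Do(joined => Console.WriteLine($"{joined.Item1}: [{string.Join(", ", joined.Item2)}]"));

        pipeline.Run();
    }
}
```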
+ public static IProducer Join( + this IEnumerable> inputs, + ReproducibleInterpolator interpolator, + Func outputCreator, + DeliveryPolicy deliveryPolicy = null) + { + var count = inputs.Count(); + if (count > 1) + { + var buffer = new TOut[count]; + return Join( + inputs.First(), + inputs.Skip(1), + interpolator, + (m, secondaryArray) => + { + buffer[0] = outputCreator(m); + for (int i = 1; i < count; i++) + { + buffer[i] = outputCreator(secondaryArray[i - 1]); + } - var i = 0; - foreach (var input in inputs) + return buffer; + }, + deliveryPolicy, + deliveryPolicy); + } + else if (count == 1) { - input.PipeTo(join.InSecondaries[i++], secondaryDeliveryPolicy); + return inputs.First().Select(x => new[] { outputCreator(x) }, deliveryPolicy); + } + else + { + throw new ArgumentException("Vector join with empty inputs collection."); } - - return join; } /// - /// Sparse vector join. + /// Joins an enumeration of streams into a vector stream, based on a specified reproducible interpolator. /// - /// Type of input messages. - /// Type of key values. - /// Type of the interpolation result. - /// Primary stream. - /// Collection of secondary streams. + /// Type of input stream messages. + /// Collection of input streams. /// Reproducible interpolator to use when joining the streams. - /// An optional delivery policy for the primary stream. - /// An optional delivery policy for the secondary stream(s). + /// An optional delivery policy to use for the streams. /// Output stream. - public static Join, TIn, TInterpolation, Dictionary> Join( - this IProducer> primary, - IEnumerable> inputs, - ReproducibleInterpolator interpolator, - DeliveryPolicy> primaryDeliveryPolicy = null, - DeliveryPolicy secondaryDeliveryPolicy = null) + public static IProducer Join( + this IEnumerable> inputs, + ReproducibleInterpolator interpolator, + DeliveryPolicy deliveryPolicy = null) { - var buffer = new Dictionary(); - return primary.Join( - inputs, - interpolator, - (keys, values) => - { - buffer.Clear(); - foreach (var keyPair in keys) - { - buffer[keyPair.Key] = values[keyPair.Value]; - } - - return buffer; - }, - primaryDeliveryPolicy, - secondaryDeliveryPolicy); + return inputs.Join(interpolator, _ => _, deliveryPolicy); } - } - #endregion Sparse vector joins + #endregion Vector joins + } } \ No newline at end of file diff --git a/Sources/Runtime/Microsoft.Psi/Operators/Pickers.cs b/Sources/Runtime/Microsoft.Psi/Operators/Pickers.cs index 7c4e08aae..46fd58a1e 100644 --- a/Sources/Runtime/Microsoft.Psi/Operators/Pickers.cs +++ b/Sources/Runtime/Microsoft.Psi/Operators/Pickers.cs @@ -4,6 +4,8 @@ namespace Microsoft.Psi { using System; + using System.Collections.Generic; + using Microsoft.Psi.Components; /// /// Extension methods that simplify operator usage. @@ -18,9 +20,8 @@ public static partial class Operators /// Predicate function by which to filter messages. /// An optional delivery policy. /// Output stream. - public static IProducer Where(this IProducer source, Func condition, DeliveryPolicy deliveryPolicy = null) - { - return Process( + public static IProducer Where(this IProducer source, Func condition, DeliveryPolicy deliveryPolicy = null) => + Process( source, (d, e, s) => { @@ -30,7 +31,6 @@ public static IProducer Where(this IProducer source, Func /// Filter messages to those where a given condition is met. @@ -40,10 +40,8 @@ public static IProducer Where(this IProducer source, FuncPredicate function by which to filter messages. /// An optional delivery policy. /// Output stream. 
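Note: the simplified vector join above folds an enumeration of same-typed streams into a single array-valued stream (optionally through an outputCreator), interpolating against the first stream's originating times. A brief sketch under the same interpolator assumption as the previous example:

```csharp
using System;
using System.Linq;
using Microsoft.Psi;

class VectorJoinSketch
{
    static void Main()
    {
        using var pipeline = Pipeline.Create();

        var inputs = Enumerable.Range(0, 4)
            .Select(i => Generators.Repeat(pipeline, (double)i, 10, TimeSpan.FromMilliseconds(50)))
            .ToArray();

        // IProducer<double[]>: one array element per input stream.
        inputs.Join(Reproducible.Nearest<double>())
              .Do(vector => Console.WriteLine(string.Join(", ", vector)));

        pipeline.Run();
    }
}
```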
- public static IProducer Where(this IProducer source, Predicate condition, DeliveryPolicy deliveryPolicy = null) - { - return Where(source, (d, e) => condition(d), deliveryPolicy); - } + public static IProducer Where(this IProducer source, Predicate condition, DeliveryPolicy deliveryPolicy = null) => + Where(source, (d, e) => condition(d), deliveryPolicy); /// /// Filter stream to the first n messages. @@ -53,21 +51,80 @@ public static IProducer Where(this IProducer source, Predicate condi /// Number of messages. /// An optional delivery policy. /// Output stream. - public static IProducer First(this IProducer source, int number, DeliveryPolicy deliveryPolicy = null) + public static IProducer First(this IProducer source, int number, DeliveryPolicy deliveryPolicy = null) => + source.Where(v => number-- > 0, deliveryPolicy); + + /// + /// Filter stream to the first message (single-message stream). + /// + /// Type of source/output messages. + /// Source stream. + /// An optional delivery policy. + /// An output stream containing only the first message. + public static IProducer First(this IProducer source, DeliveryPolicy deliveryPolicy = null) => + First(source, 1, deliveryPolicy); + + /// + /// Filter stream to the last n messages. + /// + /// Type of source/output messages. + /// Source stream. + /// The number of messages to filter. + /// An optional delivery policy. + /// An output stream containing only the last message. + public static IProducer Last(this IProducer source, int count, DeliveryPolicy deliveryPolicy = null) { - return source.Where(v => number-- > 0, deliveryPolicy); + var lastValues = new List<(T, DateTime)>(); + var processor = new Processor( + source.Out.Pipeline, + (t, envelope, _) => + { + lastValues.Add((t.DeepClone(), envelope.OriginatingTime)); + if (lastValues.Count > count) + { + lastValues.RemoveAt(0); + } + }, + (_, emitter) => + { + foreach ((var t, var originatingTime) in lastValues) + { + emitter.Post(t, originatingTime); + } + }); + + return source.PipeTo(processor, deliveryPolicy); } /// - /// Filter stream to the first message (single-message stream). + /// Filter stream to the last message. /// /// Type of source/output messages. /// Source stream. /// An optional delivery policy. - /// Output stream. - public static IProducer First(this IProducer source, DeliveryPolicy deliveryPolicy = null) + /// An output stream containing only the last message. + public static IProducer Last(this IProducer source, DeliveryPolicy deliveryPolicy = null) { - return First(source, 1, deliveryPolicy); + var captured = false; + T last = default; + DateTime lastOriginatingTime = default; + var processor = new Processor( + source.Out.Pipeline, + (t, envelope, _) => + { + captured = true; + t.DeepClone(ref last); + lastOriginatingTime = envelope.OriginatingTime; + }, + (_, emitter) => + { + if (captured) + { + emitter.Post(last, lastOriginatingTime); + } + }); + + return source.PipeTo(processor, deliveryPolicy); } } } diff --git a/Sources/Runtime/Microsoft.Psi/Operators/Time.cs b/Sources/Runtime/Microsoft.Psi/Operators/Time.cs index 5eedcbe6b..8a6a564ea 100644 --- a/Sources/Runtime/Microsoft.Psi/Operators/Time.cs +++ b/Sources/Runtime/Microsoft.Psi/Operators/Time.cs @@ -36,17 +36,25 @@ public static IProducer Latency(this IProducer source, DeliveryP } /// - /// Delay messages by given time span. + /// Delays the delivery of messages by a given time span. /// - /// Type of source/output messages. - /// Source stream. - /// Time span by which to delay. 
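Note: the new Last operators above buffer up to the requested number of messages and post them only when the source stream closes. A sketch of First/Last on a finite Range stream, based on the signatures in this diff:

```csharp
using System;
using Microsoft.Psi;

class FirstLastSketch
{
    static void Main()
    {
        using var pipeline = Pipeline.Create();

        var source = Generators.Range(pipeline, 0, 10, TimeSpan.FromMilliseconds(10));

        source.First(3).Do(m => Console.WriteLine($"first: {m}"));  // 0, 1, 2
        source.Last(2).Do(m => Console.WriteLine($"last: {m}"));    // 8, 9 (posted when the stream closes)
        source.Last().Do(m => Console.WriteLine($"last: {m}"));     // 9

        pipeline.Run();
    }
}
```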
+ /// The type of the source/output messages. + /// The source stream. + /// The time span by which to delay the messages. /// An optional delivery policy. - /// Output stream. + /// The output stream. + /// + /// This operator delays the delivery of messages on the source stream by a fixed amount of time + /// ahead of the creation time of the source messages. This ensures that the messages are not + /// delivered to the downstream receiver(s) until the pipeline clock has advanced to at least + /// the delayed time. The observed delay may be slightly larger than the specified time span to + /// account for latencies at the emitters and receivers. The originating times of the source + /// messages are preserved. + /// public static IProducer Delay(this IProducer source, TimeSpan delay, DeliveryPolicy deliveryPolicy = null) { return source - .Process((d, e, s) => s.Post((d, e.OriginatingTime), e.OriginatingTime + delay), deliveryPolicy) + .Process((d, e, s) => s.Post((d, e.OriginatingTime), e.CreationTime + delay), deliveryPolicy) .Process<(T, DateTime), T>((t, _, s) => s.Post(t.Item1, t.Item2), DeliveryPolicy.SynchronousOrThrottle); } } diff --git a/Sources/Runtime/Microsoft.Psi/Operators/Timers.cs b/Sources/Runtime/Microsoft.Psi/Operators/Timers.cs index 45f5e51e6..033d0f1b8 100644 --- a/Sources/Runtime/Microsoft.Psi/Operators/Timers.cs +++ b/Sources/Runtime/Microsoft.Psi/Operators/Timers.cs @@ -19,7 +19,7 @@ public static class Timers /// but if the pipeline slows down, the interval between messages might not appear constant. /// /// The type of data in the sequence. - /// The pipeline that will run this generator. + /// The pipeline to add the component to. /// The interval at which to generate messages. /// The function generating the messages. /// A stream of messages of type T. @@ -35,7 +35,7 @@ public static IProducer Timer(Pipeline pipeline, TimeSpan interval, Func - /// The pipeline that will run this generator. + /// The pipeline to add the component to. /// The interval at which to generate messages. /// A stream of messages representing time elapsed since the start of the pipeline. public static IProducer Timer(Pipeline pipeline, TimeSpan interval) diff --git a/Sources/Runtime/Microsoft.Psi/Persistence/InfiniteFileReader.cs b/Sources/Runtime/Microsoft.Psi/Persistence/InfiniteFileReader.cs index a2bc607d2..89d3365b2 100644 --- a/Sources/Runtime/Microsoft.Psi/Persistence/InfiniteFileReader.cs +++ b/Sources/Runtime/Microsoft.Psi/Persistence/InfiniteFileReader.cs @@ -65,16 +65,6 @@ public static bool IsActive(string name, string path) return true; } - /// - /// Indicates whether more data might be added to this file - /// (i.e. the file still has an active writer). - /// - /// Returns true if there is an active writer to this file. - public bool IsMoreDataExpected() - { - return InfiniteFileWriter.IsActive(this.fileName, this.path); - } - public void Dispose() { this.writePulse.Dispose(); @@ -102,7 +92,7 @@ public void Seek(int extentId, int position) /// /// Returns true if we are in the middle of a block or /// if we are positioned at the start of the block and the block size prefix is greater than zero. - /// If false, use to determine if there could ever be more data + /// If false, use to determine if there could ever be more data /// (i.e. if a writer is still active). /// /// True if more data is present, false if no more data is available. 
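Note: per the updated remarks, Delay now holds delivery back relative to each message's creation time while preserving originating times. A small sketch with an arbitrary delay amount:

```csharp
using System;
using Microsoft.Psi;

class DelaySketch
{
    static void Main()
    {
        using var pipeline = Pipeline.Create();

        var source = Generators.Repeat(pipeline, 1, 5, TimeSpan.FromMilliseconds(100));

        // Delivery is held back ~250 ms past each message's creation time;
        // the originating times observed downstream are unchanged.
        source.Delay(TimeSpan.FromMilliseconds(250))
              .Do((m, e) => Console.WriteLine($"{m} @ {e.OriginatingTime:HH:mm:ss.fff}"));

        pipeline.Run();
    }
}
```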
diff --git a/Sources/Runtime/Microsoft.Psi/Persistence/InfiniteFileWriter.cs b/Sources/Runtime/Microsoft.Psi/Persistence/InfiniteFileWriter.cs index d3204d598..c44613509 100644 --- a/Sources/Runtime/Microsoft.Psi/Persistence/InfiniteFileWriter.cs +++ b/Sources/Runtime/Microsoft.Psi/Persistence/InfiniteFileWriter.cs @@ -14,7 +14,6 @@ namespace Microsoft.Psi.Persistence internal unsafe sealed class InfiniteFileWriter : IDisposable { internal const string FileNameFormat = "{0}_{1:000000}.psi"; - internal const string ActiveWriterMutexFormat = @"Global\ActiveWriterMutex_{0}_{1}"; private const string PulseEventFormat = @"Global\PulseEvent_{0}_{1}"; private readonly object syncRoot = new object(); private string extentName; @@ -32,7 +31,6 @@ internal unsafe sealed class InfiniteFileWriter : IDisposable private bool disposed = false; private EventWaitHandle localWritePulse; private Mutex globalWritePulse; - private Mutex activeWriterMutex; private Queue priorExtents; private int priorExtentQueueLength; private object viewDisposeLock = new object(); @@ -52,7 +50,16 @@ public InfiniteFileWriter(string path, string fileName, int extentSize) this.localWritePulse = new EventWaitHandle(false, EventResetMode.ManualReset); new Thread(new ThreadStart(() => { - this.globalWritePulse = new Mutex(true, PulseEventName(path, fileName)); + try + { + this.globalWritePulse = new Mutex(true, PulseEventName(path, fileName)); + } + catch (UnauthorizedAccessException) + { + // Some platforms don't allow global mutexes. In this case + // we can still continue on with a slight perf degradation. + } + try { while (!this.disposed) @@ -67,12 +74,6 @@ public InfiniteFileWriter(string path, string fileName, int extentSize) // ignore } })) { IsBackground = true }.Start(); - bool isSingleWriter; - this.activeWriterMutex = new Mutex(false, ActiveWriterMutexName(path, fileName), out isSingleWriter); - if (!isSingleWriter) - { - throw new IOException("The file is already opened in write mode."); - } this.CreateNewExtent(); } @@ -87,24 +88,6 @@ public InfiniteFileWriter(string path, string fileName, int extentSize) public int CurrentBlockStart => (int)(this.freePointer - this.startPointer); - /// - /// Indicates whether the specified file has an active writer. - /// - /// Infinite file name. - /// Infinite file path. - /// Returns true if there is an active writer to this file. 
- public static bool IsActive(string name, string path) - { - Mutex writerActiveMutex; - if (!Mutex.TryOpenExisting(InfiniteFileWriter.ActiveWriterMutexName(path, name), out writerActiveMutex)) - { - return false; - } - - writerActiveMutex.Dispose(); - return true; - } - public void Dispose() { this.CloseCurrent(true); @@ -122,10 +105,8 @@ public void Dispose() this.localWritePulse.Set(); this.localWritePulse.Dispose(); this.localWritePulse = null; - this.globalWritePulse.Dispose(); + this.globalWritePulse?.Dispose(); this.globalWritePulse = null; - this.activeWriterMutex.Dispose(); - this.activeWriterMutex = null; // may have already been disposed in CloseCurrent this.view?.Dispose(); @@ -231,11 +212,6 @@ internal static string PulseEventName(string path, string fileName) return MakeHandleName(PulseEventFormat, path, fileName); } - internal static string ActiveWriterMutexName(string path, string fileName) - { - return MakeHandleName(ActiveWriterMutexFormat, path, fileName); - } - private static string MakeHandleName(string format, string path, string fileName) { var name = string.Format(format, path?.ToLower().GetDeterministicHashCode(), fileName.ToLower()); diff --git a/Sources/Runtime/Microsoft.Psi/Persistence/MessageReader.cs b/Sources/Runtime/Microsoft.Psi/Persistence/MessageReader.cs index d1b7cf11e..b06c3c3e0 100644 --- a/Sources/Runtime/Microsoft.Psi/Persistence/MessageReader.cs +++ b/Sources/Runtime/Microsoft.Psi/Persistence/MessageReader.cs @@ -41,8 +41,6 @@ public MessageReader(string fileName, string path) public int CurrentMessageStart => this.fileReader.CurrentBlockStart; - public bool IsMoreDataExpected() => this.fileReader.IsMoreDataExpected(); - public void Seek(int extentId, int position) { this.fileReader.Seek(extentId, position); diff --git a/Sources/Runtime/Microsoft.Psi/Persistence/MessageWriter.cs b/Sources/Runtime/Microsoft.Psi/Persistence/MessageWriter.cs index dab58a719..e92ac6555 100644 --- a/Sources/Runtime/Microsoft.Psi/Persistence/MessageWriter.cs +++ b/Sources/Runtime/Microsoft.Psi/Persistence/MessageWriter.cs @@ -17,13 +17,9 @@ internal sealed class MessageWriter : IDisposable private const int DefaultRetentionQueueLength64 = 6; private const int DefaultRetentionQueueLength32 = 0; private InfiniteFileWriter fileWriter; - private string name; - private string path; public MessageWriter(string name, string path, int extentSize = 0) { - this.name = name; - this.path = path; if (extentSize == 0) { extentSize = Environment.Is64BitProcess ? 
DefaultExtentCapacity64 : DefaultExtentCapacity32; diff --git a/Sources/Runtime/Microsoft.Psi/Persistence/MetadataCache.cs b/Sources/Runtime/Microsoft.Psi/Persistence/MetadataCache.cs index 9c5f76fb5..65436f6cb 100644 --- a/Sources/Runtime/Microsoft.Psi/Persistence/MetadataCache.cs +++ b/Sources/Runtime/Microsoft.Psi/Persistence/MetadataCache.cs @@ -165,7 +165,7 @@ public void Update() this.streamTimeInterval = GetTimeRange(newStreamDescriptors.Values, meta => meta.StreamTimeInterval); // clean up if the catalog is closed and we really reached the end - if (!this.catalogReader.IsMoreDataExpected() && !this.catalogReader.HasMoreData()) + if (!PsiStoreMonitor.IsStoreLive(this.name, this.path) && !this.catalogReader.HasMoreData()) { this.catalogReader.Dispose(); this.catalogReader = null; diff --git a/Sources/Runtime/Microsoft.Psi/Persistence/PageIndexCache.cs b/Sources/Runtime/Microsoft.Psi/Persistence/PageIndexCache.cs index e6e4a6bdd..bba2f5015 100644 --- a/Sources/Runtime/Microsoft.Psi/Persistence/PageIndexCache.cs +++ b/Sources/Runtime/Microsoft.Psi/Persistence/PageIndexCache.cs @@ -11,11 +11,13 @@ namespace Microsoft.Psi.Persistence internal class PageIndexCache : IDisposable { private readonly object syncRoot = new object(); + private readonly string name; private IndexEntry[] pageIndex = new IndexEntry[0]; private InfiniteFileReader indexReader; public PageIndexCache(string name, string path) { + this.name = name; this.indexReader = new InfiniteFileReader(path, PsiStoreCommon.GetIndexFileName(name)); } @@ -97,7 +99,7 @@ private void Update() newList.Add(indexEntry); } - if (!this.indexReader.IsMoreDataExpected()) + if (!PsiStoreMonitor.IsStoreLive(this.name, this.indexReader.Path)) { this.indexReader.Dispose(); this.indexReader = null; diff --git a/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreCommon.cs b/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreCommon.cs index 5529afe7d..a9fc4d00c 100644 --- a/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreCommon.cs +++ b/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreCommon.cs @@ -17,6 +17,7 @@ internal static class PsiStoreCommon internal static readonly string IndexFileName = "Index"; internal static readonly string DataFileName = "Data"; internal static readonly string LargeDataFileName = "LargeData"; + internal static readonly string LivePsiStoreFileName = "Live"; internal static string GetIndexFileName(string appName) { @@ -38,6 +39,11 @@ internal static string GetLargeDataFileName(string appName) return appName + "." + LargeDataFileName; } + internal static string GetLivePsiStoreFileName(string appName) + { + return appName + "." + LivePsiStoreFileName; + } + internal static bool TryGetPathToLatestVersion(string appName, string rootPath, out string fullPath) { fullPath = null; diff --git a/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreMonitor.cs b/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreMonitor.cs new file mode 100644 index 000000000..82332132d --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreMonitor.cs @@ -0,0 +1,161 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Persistence +{ + using System; + using System.Collections.Generic; + using System.IO; + using System.Linq; + using System.Threading; + + /// + /// Represents a class that monitors live stores. 
+ /// + public static class PsiStoreMonitor + { + /// + /// The frequency with which we check whether the live marker file for a store + /// exists and cannot be read from because the writer holds an exclusive share. + /// + private const int UpdatePeriodMs = 5000; + + /// + /// The timeout period during which no client asks for status of a monitored live + /// store before it gets removed from the the collection of monitored live stores. + /// + private const int AccessPeriodTimeoutMs = 30000; + + /// + /// The collection of stores whose live marker files are currently being tracked. + /// + private static Dictionary<(string storeName, string storePath), MarkerFileInfo> monitoredStores = new Dictionary<(string storeName, string storePath), MarkerFileInfo>(); + + /// + /// A lock to ensure the above collection is not modified while it's being iterated. + /// + private static object collectionLock = new object(); + + /// + /// The timer that controls when we check whether the live marker files exist + /// and can be read from. + /// + private static Timer updateTime = new Timer(OnUpdateTimer, null, UpdatePeriodMs, UpdatePeriodMs); + + /// + /// Gets the name of the marker file that can be used to determine if a store is currently live. + /// + /// The name of the store. + /// The path to the store. + /// True if the store is live, otherwise false.. + public static bool IsStoreLive(string storeName, string storePath) + { + // Check if we're tracking the marker file yet. + if (monitoredStores.ContainsKey((storeName, storePath))) + { + MarkerFileInfo markerFileInfo = monitoredStores[(storeName, storePath)]; + markerFileInfo.LastAccessTime = DateTime.UtcNow; + return markerFileInfo.IsLive; + } + else + { + // Create a record for the live marker file and update its initial status. + MarkerFileInfo markerFileInfo = new MarkerFileInfo(storeName, storePath); + UpdateMarkerFileInfo(markerFileInfo); + + lock (collectionLock) + { + monitoredStores[(storeName, storePath)] = markerFileInfo; + } + + return markerFileInfo.IsLive; + } + } + + /// + /// Gets the path to the live marker file. + /// + /// The name of the store. + /// The path to the store. + /// The full path to the live marker file. + public static string GetLiveMarkerFileName(string storeName, string storePath) + { + // Virtual (in-memory) stores have no path. For such stores + // we place the live marker file into the user's temp folder. 
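Note: IsStoreLive above caches liveness per (store name, store path) and refreshes it on a timer. A usage sketch with a hypothetical store name and path:

```csharp
using System;
using Microsoft.Psi.Persistence;

class LivenessSketch
{
    static void Main()
    {
        // Hypothetical store name and path, for illustration only.
        var isLive = PsiStoreMonitor.IsStoreLive("MyStore", @"C:\Data\MyStore.0000");
        Console.WriteLine(isLive ? "A writer still holds the store open." : "The store is closed.");
    }
}
```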
+ string liveMarkerFilePath = storePath; + if (liveMarkerFilePath == null) + { + liveMarkerFilePath = System.IO.Path.Combine(Path.GetTempPath(), "PsiStoreLiveMarkers"); + if (!Directory.Exists(liveMarkerFilePath)) + { + Directory.CreateDirectory(liveMarkerFilePath); + } + } + + return Path.Combine(liveMarkerFilePath, PsiStoreCommon.GetLivePsiStoreFileName(storeName)); + } + + private static void OnUpdateTimer(object state) + { + lock (collectionLock) + { + // Remove any monitored stores where no client has asked for its status in a while + monitoredStores = monitoredStores + .Where(ms => ms.Value.LastAccessTime.AddMilliseconds(AccessPeriodTimeoutMs) > DateTime.UtcNow) + .ToDictionary(ms => ms.Key, ms => ms.Value); + + // Check the status of all stores that are still showing as live + foreach (var markerFileInfo in monitoredStores.Values) + { + if (markerFileInfo.IsLive) + { + UpdateMarkerFileInfo(markerFileInfo); + } + } + } + } + + private static void UpdateMarkerFileInfo(MarkerFileInfo markerFileInfo) + { + // A non-live store will never become live again + if (markerFileInfo.IsLive) + { + // Check if the live marker file exists + if (File.Exists(markerFileInfo.FilePath)) + { + markerFileInfo.IsLive = !Platform.Specific.CanOpenFile(markerFileInfo.FilePath); + } + else + { + // The marker file does not exist, which means the writer finished and deleted it + markerFileInfo.IsLive = false; + } + } + } + + private class MarkerFileInfo + { + public MarkerFileInfo(string storeName, string storePath) + { + this.FilePath = GetLiveMarkerFileName(storeName, storePath); + this.IsLive = true; + this.LastAccessTime = DateTime.UtcNow; + } + + /// + /// Gets or sets the path to the live marker file. + /// + public string FilePath { get; set; } + + /// + /// Gets or sets a value indicating whether the store is currently live. + /// + public bool IsLive { get; set; } + + /// + /// Gets or sets the last time a client checked the status of the marker file. + /// + public DateTime LastAccessTime { get; set; } + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreReader.cs b/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreReader.cs index f0adbeda2..bee6afcdd 100644 --- a/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreReader.cs +++ b/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreReader.cs @@ -115,29 +115,6 @@ public PsiStoreReader(PsiStoreReader other) /// public RuntimeInfo RuntimeVersion => this.metadataCache.Resource.RuntimeVersion; - /// - /// Indicates whether the specified data store has an active writer. - /// - /// The store name. - /// The store path. - /// Returns true if there is an active data file writer to this store. - public static bool IsStoreLive(string storeName, string storePath) - { - if (!Mutex.TryOpenExisting(InfiniteFileWriter.ActiveWriterMutexName(storePath, PsiStoreCommon.GetCatalogFileName(storeName)), out Mutex writerActiveMutex)) - { - return false; - } - - writerActiveMutex.Dispose(); - return true; - } - - /// - /// Indicates whether this store is still being written to by an active writer. - /// - /// True if an active writer is still writing to this store, false otherwise. - public bool IsMoreDataExpected() => this.messageReader.IsMoreDataExpected(); - /// /// Opens the specified stream for reading. /// @@ -323,7 +300,7 @@ public bool MoveNext(out Envelope envelope) var hasData = this.AutoOpenAllStreams ? 
this.messageReader.MoveNext() : this.messageReader.MoveNext(this.enabledStreams); if (!hasData) { - if (!this.messageReader.IsMoreDataExpected()) + if (!PsiStoreMonitor.IsStoreLive(this.Name, this.Path)) { return false; } diff --git a/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreWriter.cs b/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreWriter.cs index bee6f6754..85ea25c2e 100644 --- a/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreWriter.cs +++ b/Sources/Runtime/Microsoft.Psi/Persistence/PsiStoreWriter.cs @@ -9,6 +9,7 @@ namespace Microsoft.Psi.Persistence using System.Linq; using Microsoft.Psi.Common; using Microsoft.Psi.Data; + using Microsoft.Psi.Serialization; /// /// Implements a writer that can write multiple streams to the same file, @@ -37,13 +38,20 @@ public sealed class PsiStoreWriter : IDisposable private readonly InfiniteFileWriter catalogWriter; private readonly InfiniteFileWriter pageIndexWriter; private readonly MessageWriter writer; - private readonly Dictionary metadata = new Dictionary(); + private readonly Dictionary metadata = new (); + private readonly BufferWriter metadataBuffer = new (128); + private readonly BufferWriter indexBuffer = new (24); private MessageWriter largeMessageWriter; - private BufferWriter metadataBuffer = new BufferWriter(128); - private BufferWriter indexBuffer = new BufferWriter(24); private int unindexedBytes = IndexPageSize; private IndexEntry nextIndexEntry; + /// + /// This file is opened in exclusive share mode when the exporter is constructed, and is + /// deleted when it gets disposed. Other processes can check the live status of the store + /// by attempting to also open this file. If that fails, then the store is still live. + /// + private FileStream liveMarkerFile; + /// /// Initializes a new instance of the class. /// @@ -64,12 +72,7 @@ public PsiStoreWriter(string name, string path, bool createSubdirectory = true) { var existingIds = Directory.EnumerateDirectories(this.path, this.name + ".????") .Select(d => d.Split('.').Last()) - .Where( - n => - { - int i; - return int.TryParse(n, out i); - }) + .Where(n => int.TryParse(n, out _)) .Select(n => int.Parse(n)); id = (existingIds.Count() == 0) ? 0 : existingIds.Max() + 1; @@ -84,12 +87,16 @@ public PsiStoreWriter(string name, string path, bool createSubdirectory = true) } } + // Open the live store marker file in exclusive file share mode. This will fail + // if another process is already writing a store with the same name and path. + this.liveMarkerFile = File.Open(PsiStoreMonitor.GetLiveMarkerFileName(this.Name, this.Path), FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None); + this.catalogWriter = new InfiniteFileWriter(this.path, PsiStoreCommon.GetCatalogFileName(this.name), CatalogExtentSize); this.pageIndexWriter = new InfiniteFileWriter(this.path, PsiStoreCommon.GetIndexFileName(this.name), IndexExtentSize); this.writer = new MessageWriter(PsiStoreCommon.GetDataFileName(this.name), this.path); // write the first index entry - this.UpdatePageIndex(0, default(Envelope)); + this.UpdatePageIndex(0, default); } /// @@ -102,6 +109,11 @@ public PsiStoreWriter(string name, string path, bool createSubdirectory = true) /// public string Path => this.path; + /// + /// Gets stream metadata. + /// + public IEnumerable Metadata => this.metadata.Values; + /// /// Closes the store. 
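Note: the liveMarkerFile comment above describes the detection scheme: the writer holds the <storeName>.Live marker open with FileShare.None and deletes it on dispose, so other processes infer liveness by failing to open it. A standalone sketch of that probe with a hypothetical marker path; the runtime itself goes through PsiStoreMonitor and Platform.Specific.CanOpenFile rather than this code:

```csharp
using System;
using System.IO;

class MarkerProbeSketch
{
    static bool IsWriterActive(string markerPath)
    {
        if (!File.Exists(markerPath))
        {
            // The writer deletes the marker on dispose, so a missing file means not live.
            return false;
        }

        try
        {
            // The writer holds the marker with FileShare.None, so this open fails while it is live.
            using var stream = File.Open(markerPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
            return false;
        }
        catch (IOException)
        {
            return true;
        }
    }

    static void Main()
    {
        Console.WriteLine(IsWriterActive(@"C:\Data\MyStore.0000\MyStore.Live"));
    }
}
```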
/// @@ -111,17 +123,31 @@ public void Dispose() this.catalogWriter.Dispose(); this.writer.Dispose(); this.largeMessageWriter?.Dispose(); + this.liveMarkerFile?.Dispose(); + + // If the live store marker file exists, try to delete it. + string liveMarkerFilePath = PsiStoreMonitor.GetLiveMarkerFileName(this.Name, this.Path); + if (File.Exists(liveMarkerFilePath)) + { + try + { + File.Delete(liveMarkerFilePath); + } + catch + { + } + } } /// /// Creates a stream to write messages to. /// The stream characteristics are extracted from the provided metadata descriptor. /// - /// The metadata describing the stream to open. + /// The metadata describing the stream to open. /// The complete metadata for the stream just created. - public PsiStreamMetadata OpenStream(PsiStreamMetadata meta) + public PsiStreamMetadata OpenStream(PsiStreamMetadata metadata) { - return this.OpenStream(meta.Id, meta.Name, meta.IsIndexed, meta.TypeName).UpdateSupplementalMetadataFrom(meta); + return this.OpenStream(metadata.Id, metadata.Name, metadata.IsIndexed, metadata.TypeName).UpdateSupplementalMetadataFrom(metadata); } /// @@ -139,34 +165,34 @@ public PsiStreamMetadata OpenStream(int streamId, string streamName, bool indexe throw new InvalidOperationException($"The stream id {streamId} has already been registered with this writer."); } - var meta = new PsiStreamMetadata(streamName, streamId, typeName); - meta.OpenedTime = Time.GetCurrentTime(); - meta.IsPersisted = true; - meta.IsIndexed = indexed; - meta.StoreName = this.name; - meta.StorePath = this.path; - this.metadata[streamId] = meta; - this.WriteToCatalog(meta); + var psiStreamMetadata = new PsiStreamMetadata(streamName, streamId, typeName) + { + OpenedTime = Time.GetCurrentTime(), + IsPersisted = true, + IsIndexed = indexed, + StoreName = this.name, + StorePath = this.path, + }; + this.metadata[streamId] = psiStreamMetadata; + this.WriteToCatalog(psiStreamMetadata); // make sure we have a large file if needed if (indexed) { - this.largeMessageWriter = this.largeMessageWriter ?? new MessageWriter(PsiStoreCommon.GetLargeDataFileName(this.name), this.path); + this.largeMessageWriter ??= new MessageWriter(PsiStoreCommon.GetLargeDataFileName(this.name), this.path); } - return meta; + return psiStreamMetadata; } /// /// Attempt to get stream metadata (available once stream has been opened). /// /// The id of the stream, unique for this store. - /// The metadata for the stream, if it has previously been opened. + /// The metadata for the stream, if it has previously been opened. /// True if stream metadata if stream has been opened so that metadata is available. - public bool TryGetMetadata(int streamId, out PsiStreamMetadata meta) - { - return this.metadata.TryGetValue(streamId, out meta); - } + public bool TryGetMetadata(int streamId, out PsiStreamMetadata metadata) => + this.metadata.TryGetValue(streamId, out metadata); /// /// Closes the stream and persists the stream statistics. 
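Note: for reference, a low-level sketch of the PsiStoreWriter members touched above (OpenStream and TryGetMetadata); in normal use these calls are driven by Exporter rather than user code, and the store name and path here are hypothetical:

```csharp
using System;
using Microsoft.Psi.Persistence;

class StoreWriterSketch
{
    static void Main()
    {
        using var writer = new PsiStoreWriter("SketchStore", @"C:\Data");

        // Register a stream and write its descriptor to the catalog.
        var meta = writer.OpenStream(1, "Values", false, typeof(double).AssemblyQualifiedName);
        Console.WriteLine($"Opened stream '{meta.Name}' (id {meta.Id}).");

        // Metadata is available once the stream has been opened.
        Console.WriteLine(writer.TryGetMetadata(1, out _) ? "Metadata available." : "Stream not opened.");
    }
}
```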
@@ -217,11 +243,11 @@ public void Write(BufferReader buffer, Envelope envelope) { var meta = this.metadata[envelope.SourceId]; meta.Update(envelope, buffer.RemainingLength); - int bytes = 0; + int bytes; if (meta.IsIndexed) { // write the object index entry in the data file and the buffer in the large data file - IndexEntry indexEntry = default(IndexEntry); + var indexEntry = default(IndexEntry); indexEntry.ExtentId = int.MinValue + this.largeMessageWriter.CurrentExtentId; // negative value indicates an index into the large file indexEntry.Position = this.largeMessageWriter.CurrentMessageStart; indexEntry.CreationTime = envelope.CreationTime; @@ -246,15 +272,27 @@ public void Write(BufferReader buffer, Envelope envelope) this.UpdatePageIndex(bytes, envelope); } + /// + /// Writes the runtime info to the catalog. + /// + /// The runtime info. + internal void WriteToCatalog(RuntimeInfo runtimeInfo) => this.WriteToCatalog((Metadata)runtimeInfo); + + /// + /// Writes the type schema to the catalog. + /// + /// The type schema. + internal void WriteToCatalog(TypeSchema typeSchema) => this.WriteToCatalog((Metadata)typeSchema); + /// /// Writes details about a stream to the stream catalog. /// - /// The stream descriptor to write. - internal void WriteToCatalog(Metadata meta) + /// The stream descriptor to write. + private void WriteToCatalog(Metadata metadata) { lock (this.catalogWriter) { - meta.Serialize(this.metadataBuffer); + metadata.Serialize(this.metadataBuffer); this.catalogWriter.Write(this.metadataBuffer); this.catalogWriter.Flush(); diff --git a/Sources/Runtime/Microsoft.Psi/Remoting/RemoteClockExporter.cs b/Sources/Runtime/Microsoft.Psi/Remoting/RemoteClockExporter.cs new file mode 100644 index 000000000..fb599819c --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi/Remoting/RemoteClockExporter.cs @@ -0,0 +1,110 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Remoting +{ + using System; + using System.Diagnostics; + using System.IO; + using System.Net; + using System.Net.Sockets; + using System.Threading; + + /// + /// Component that exports pipeline clock information over TCP to enable synchronization. + /// + public class RemoteClockExporter : IDisposable + { + internal const short ProtocolVersion = 0; + internal const int DefaultPort = 11511; + + private TcpListener listener; + private bool isDisposing; + + /// + /// Initializes a new instance of the class. + /// + /// The connection port. + public RemoteClockExporter(int port = DefaultPort) + { + this.Port = port; + this.listener = new TcpListener(IPAddress.Any, port); + this.Start(); + } + + /// + /// Gets the connection port. + /// + public int Port { get; } + + /// + /// Gets or sets a value indicating whether this machine hosts the primary pipeline clock. 
+ /// + internal static bool IsPrimaryClockSourceMachine { get; set; } = false; + + /// + public void Dispose() + { + this.isDisposing = true; + this.listener.Stop(); + this.listener = null; + } + + private void Start() + { + new Thread(new ThreadStart(this.Listen)) { IsBackground = true }.Start(); + } + + private void Listen() + { + if (this.listener != null) + { + NetworkStream networkStream = null; + try + { + this.listener.Start(); + networkStream = this.listener.AcceptTcpClient().GetStream(); + + // clock synchroniztion + IsPrimaryClockSourceMachine = true; + if (RemoteClockImporter.PrimaryClockSourceMachineName != Environment.MachineName && + RemoteClockImporter.PrimaryClockSourceMachineName.Length > 0) + { + // client intends to use this machine as the primary clock source. However, a + // RemoteClockImporter on this machine also intends to sync with some other machine! + throw new ArgumentException( + $"A {nameof(RemoteClockImporter)} on this machine is expecting the remote machine ({RemoteClockImporter.PrimaryClockSourceMachineName}) " + + $"to serve as the primary clock, but this machine is instead being asked to serve as the primary." + + $"There may be only one machine hosting the primary clock."); + } + + // check protocol version + using var reader = new BinaryReader(networkStream); + var version = reader.ReadInt16(); + if (version != ProtocolVersion) + { + throw new IOException($"Unsupported remote clock protocol version: {version}"); + } + + using var writer = new BinaryWriter(networkStream); + writer.Write(DateTime.UtcNow.Ticks); // current machine time, used by client to sync clocks + writer.Write(Environment.MachineName); + writer.Flush(); + } + catch (Exception ex) + { + Trace.WriteLine($"{nameof(RemoteClockExporter)} Exception: {ex.Message}"); + } + finally + { + networkStream?.Dispose(); + if (!this.isDisposing) + { + this.listener.Stop(); + this.Start(); + } + } + } + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi/Remoting/RemoteClockImporter.cs b/Sources/Runtime/Microsoft.Psi/Remoting/RemoteClockImporter.cs new file mode 100644 index 000000000..63a206116 --- /dev/null +++ b/Sources/Runtime/Microsoft.Psi/Remoting/RemoteClockImporter.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Remoting +{ + using System; + using System.Diagnostics; + using System.IO; + using System.Net.Sockets; + using System.Threading; + using Microsoft.Psi; + + /// + /// Component that reads remote clock information over TCP and synchronizes the local pipeline clock. + /// + public class RemoteClockImporter : IDisposable + { + private readonly Pipeline pipeline; + private readonly string host; + private readonly int port; + private readonly TcpClient client; + private readonly EventWaitHandle connected = new (false, EventResetMode.ManualReset); + + /// + /// Initializes a new instance of the class. + /// + /// The pipeline to add the component to. + /// The host name of the remote clock exporter/server. + /// The port on which to connect. + public RemoteClockImporter(Pipeline pipeline, string host, int port = RemoteClockExporter.DefaultPort) + { + this.pipeline = pipeline; + this.client = new TcpClient(); + this.host = host; + this.port = port; + this.connected.Reset(); + new Thread(new ThreadStart(this.SynchronizeLocalPipelineClock)) { IsBackground = true }.Start(); + } + + /// + /// Gets wait handle for remote connection being established. 
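Note: RemoteClockExporter and RemoteClockImporter together implement a simple request/response clock sync (the importer's offset estimate is described further below). A two-machine sketch; the host name "primary-host" and the role selection are assumptions for illustration:

```csharp
using System;
using Microsoft.Psi;
using Microsoft.Psi.Remoting;

class ClockSyncSketch
{
    static void Main(string[] args)
    {
        if (args.Length > 0 && args[0] == "primary")
        {
            // Machine hosting the primary clock: keep the exporter listening (default port 11511).
            using var exporter = new RemoteClockExporter();
            Console.ReadLine();
        }
        else
        {
            // Secondary machine: synchronize the local pipeline clock before running.
            using var pipeline = Pipeline.Create();
            using var importer = new RemoteClockImporter(pipeline, "primary-host");
            importer.Connected.WaitOne();
            Timers.Timer(pipeline, TimeSpan.FromSeconds(1)).Do(t => Console.WriteLine(t));
            pipeline.Run();
        }
    }
}
```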
+ /// + /// This should be waited on prior to running the pipeline. + public EventWaitHandle Connected + { + get { return this.connected; } + } + + /// + /// Gets or sets machine with which to synchronize pipeline clock. + /// + internal static string PrimaryClockSourceMachineName { get; set; } = string.Empty; + + /// + public void Dispose() + { + this.client.Close(); + this.connected.Dispose(); + } + + private void SynchronizeLocalPipelineClock() + { + var completed = false; + while (!completed) + { + NetworkStream networkStream = null; + try + { + Trace.WriteLine($"Attempting to connect to {this.port}"); + this.client.Connect(this.host, this.port); + networkStream = this.client.GetStream(); + Trace.WriteLine($"Connected to {this.port}."); + + // send protocol version + using var writer = new BinaryWriter(networkStream); + var stopwatch = new Stopwatch(); + stopwatch.Start(); + writer.Write(RemoteClockExporter.ProtocolVersion); + + using var reader = new BinaryReader(networkStream); + var timeAtExporter = reader.ReadInt64(); + stopwatch.Stop(); + var timeAtImporter = DateTime.UtcNow.Ticks; + var elapsedTime = stopwatch.ElapsedTicks; + var machine = reader.ReadString(); + + // Elapsed time includes the complete round trip latency between writing the header and receiving the + // remote (exporter) machine's time. We assume that half of the time was from here to the exporter, meaning + // that subtracting elapsed / 2 from our current time gives the time as it was on our clock when the exporter + // sent it's time. The difference becomes an offset to apply to our pipeline clock to synchronize. + var timeOffset = TimeSpan.FromTicks(timeAtExporter - (timeAtImporter - (elapsedTime / 2))); + Trace.WriteLine($"{nameof(RemoteClockImporter)} clock sync: Local={timeAtImporter} Remote[{machine}]={timeAtExporter} Latency={elapsedTime} Offset={timeOffset.Ticks}."); + if (machine == Environment.MachineName) + { + // The "remote" machine is actually *this* machine. In this case, assume exactly zero offset. + Trace.WriteLine($"{nameof(RemoteClockImporter)} clock sync with self ignored ({machine}). Pipeline clock will remain unchanged."); + timeOffset = TimeSpan.Zero; + } + else if (RemoteClockExporter.IsPrimaryClockSourceMachine) + { + // An exporter on this machine already thinks that *this* is the primary source, but this importer + // is attempting to synchronize with some other machine instead! + throw new ArgumentException( + $"{nameof(RemoteClockImporter)} treating remote machine ({machine}) as the primary clock source, but this machine ({Environment.MachineName}) is already the " + + $"primary. There may be only one machine hosting the primary clock. Check {nameof(RemoteClockImporter)} configurations."); + } + + if (PrimaryClockSourceMachineName != machine && PrimaryClockSourceMachineName.Length > 0) + { + // Another importer on this machine has already negotiated a clock sync with some machine other than + // the one that this importer is syncing with. Importers disagree as to who the primary should be! + throw new ArgumentException( + $"{nameof(RemoteClockImporter)} treating remote machine ({machine}) as the primary clock source, but another {nameof(RemoteClockImporter)} " + + $"is treating a different remote machine ({PrimaryClockSourceMachineName}) as the primary. " + + $"There may be only one machine hosting the primary clock. 
Check {nameof(RemoteClockImporter)} configurations."); + } + + // synchronize pipeline clock + this.pipeline.VirtualTimeOffset = timeOffset; + this.connected.Set(); + completed = true; + } + catch (Exception ex) + { + Trace.WriteLine($"{nameof(RemoteClockImporter)} Exception: {ex.Message}"); + } + finally + { + networkStream?.Dispose(); + this.client.Close(); + } + } + } + } +} diff --git a/Sources/Runtime/Microsoft.Psi/Remoting/RemoteExporter.cs b/Sources/Runtime/Microsoft.Psi/Remoting/RemoteExporter.cs index 066b308d9..0935318d6 100644 --- a/Sources/Runtime/Microsoft.Psi/Remoting/RemoteExporter.cs +++ b/Sources/Runtime/Microsoft.Psi/Remoting/RemoteExporter.cs @@ -25,12 +25,15 @@ public sealed class RemoteExporter : IDisposable private const TransportKind DefaultTransport = TransportKind.NamedPipes; private readonly int port; - private ConcurrentDictionary connections = new ConcurrentDictionary(); - private ITransport dataTransport; - private long maxBytesPerSecond; - private double bytesPerSecondSmoothingWindowSeconds; - private string name; - private string path; + private readonly TransportKind transport; + private readonly string name; + private readonly string path; + private readonly long maxBytesPerSecond; + private readonly TcpListener metaListener; + private readonly ITransport dataTransport; + private readonly double bytesPerSecondSmoothingWindowSeconds; + + private ConcurrentDictionary connections = new (); private bool disposed = false; private Thread metaClientThread; private Thread dataClientThread; @@ -38,20 +41,20 @@ public sealed class RemoteExporter : IDisposable /// /// Initializes a new instance of the class. /// - /// Pipeline to which to attach. + /// The pipeline to add the component to. /// TCP port on which to listen (default 11411). /// Transport kind to use. /// Maximum bytes/sec quota (default infinite). /// Smoothing window over which to compute bytes/sec (default 5 sec.). public RemoteExporter(Pipeline pipeline, int port = DefaultPort, TransportKind transport = DefaultTransport, long maxBytesPerSecond = long.MaxValue, double bytesPerSecondSmoothingWindowSeconds = 5.0) - : this(PsiStore.Create(pipeline, $"RemoteExporter_{Guid.NewGuid().ToString()}", null, true), port, transport, maxBytesPerSecond, bytesPerSecondSmoothingWindowSeconds) + : this(PsiStore.Create(pipeline, $"RemoteExporter_{Guid.NewGuid()}", null, true), port, transport, maxBytesPerSecond, bytesPerSecondSmoothingWindowSeconds) { } /// /// Initializes a new instance of the class. /// - /// Pipeline to which to attach. + /// The pipeline to add the component to. /// Transport kind to use. /// Maximum bytes/sec quota (default infinite). /// Smoothing window over which to compute bytes/sec (default 5 sec.). @@ -92,6 +95,9 @@ public RemoteExporter(Exporter exporter, int port = DefaultPort, TransportKind t : this(exporter.Name, exporter.Path, port, transport, maxBytesPerSecond, bytesPerSecondSmoothingWindowSeconds) { this.Exporter = exporter; + + // add this as a node in the exporter so that it gets disposed + exporter.GetOrCreateNode(this); } /// @@ -106,6 +112,9 @@ public RemoteExporter(Importer importer, int port = DefaultPort, TransportKind t : this(importer.StoreName, importer.StorePath, port, transport, maxBytesPerSecond, bytesPerSecondSmoothingWindowSeconds) { // used to remote an existing store. 
this.Exporter remains null + + // add this as a node in the importer so that it gets disposed + importer.GetOrCreateNode(this); } private RemoteExporter(string name, string path, int port, TransportKind transport, long maxBytesPerSecond, double bytesPerSecondSmoothingWindowSeconds) @@ -113,6 +122,8 @@ private RemoteExporter(string name, string path, int port, TransportKind transpo this.name = name; this.path = path; this.port = port; + this.transport = transport; + this.metaListener = new TcpListener(IPAddress.Any, this.port); this.dataTransport = Transport.TransportOfKind(transport); this.maxBytesPerSecond = maxBytesPerSecond; this.bytesPerSecondSmoothingWindowSeconds = bytesPerSecondSmoothingWindowSeconds; @@ -123,6 +134,16 @@ private RemoteExporter(string name, string path, int port, TransportKind transpo this.dataClientThread.Start(); } + /// + /// Gets the TCP port being used. + /// + public int Port => this.port; + + /// + /// Gets the transport being used. + /// + public TransportKind TransportKind => this.transport; + /// /// Gets exporter being remoted. /// @@ -143,6 +164,7 @@ public void Dispose() this.metaClientThread = null; this.dataClientThread = null; + this.metaListener.Stop(); this.dataTransport.Dispose(); } @@ -154,21 +176,9 @@ private void AddConnection(Connection connection) } } - private Connection GetConnection(Guid id) - { - Connection connection; - if (!this.connections.TryGetValue(id, out connection)) - { - throw new ArgumentException($"Remoting connection does not exist (ID={id})"); - } - - return connection; - } - private void RemoveConnection(Guid id) { - Connection ignore; - if (!this.connections.TryRemove(id, out ignore)) + if (!this.connections.TryRemove(id, out _)) { throw new ArgumentException($"Remoting connection could not be removed (ID={id})"); } @@ -176,54 +186,66 @@ private void RemoveConnection(Guid id) private void AcceptMetaClientsBackground() { - var metaListener = new TcpListener(IPAddress.Any, this.port); - metaListener.Start(); - while (!this.disposed) + try { - var client = metaListener.AcceptTcpClient(); - Connection connection = null; - try + this.metaListener.Start(); + while (!this.disposed) { - connection = new Connection(client, this.dataTransport, this.name, this.path, this.RemoveConnection, this.Exporter, this.maxBytesPerSecond, this.bytesPerSecondSmoothingWindowSeconds); - this.AddConnection(connection); - connection.Connect(); - Trace.WriteLine($"RemoteExporter meta client accepted (ID={connection.Id})"); - } - catch (Exception ex) - { - Trace.TraceError($"RemoteExporter meta connection error (Message={ex.Message}, ID={connection?.Id})"); - client.Dispose(); + var client = this.metaListener.AcceptTcpClient(); + Connection connection = null; + try + { + connection = new Connection(client, this.dataTransport, this.name, this.path, this.RemoveConnection, this.Exporter, this.maxBytesPerSecond, this.bytesPerSecondSmoothingWindowSeconds); + this.AddConnection(connection); + connection.Connect(); + Trace.WriteLine($"RemoteExporter meta client accepted (ID={connection.Id})"); + } + catch (Exception ex) + { + Trace.TraceError($"RemoteExporter meta connection error (Message={ex.Message}, ID={connection?.Id})"); + client.Dispose(); + } } } + catch (SocketException se) + { + Trace.TraceError($"RemoteExporter meta listener error (Message={se.Message})"); + } } private void AcceptDataClientsBackground() { - this.dataTransport.StartListening(); - while (!this.disposed) + try { - var client = this.dataTransport.AcceptClient(); - var guid = 
Guid.Empty; - try + this.dataTransport.StartListening(); + while (!this.disposed) { - guid = client.ReadSessionId(); - Trace.WriteLine($"RemoteExporter data client accepted (ID={guid})"); - - Connection connection; - if (this.connections.TryGetValue(guid, out connection)) + var client = this.dataTransport.AcceptClient(); + var guid = Guid.Empty; + try { - connection.JoinBackground(client); + guid = client.ReadSessionId(); + Trace.WriteLine($"RemoteExporter data client accepted (ID={guid})"); + + if (this.connections.TryGetValue(guid, out Connection connection)) + { + connection.JoinBackground(client); + } + else + { + throw new IOException($"RemoteExporter error: Invalid remoting connection ID: {guid}"); + } } - else + catch (Exception ex) { - throw new IOException($"RemoteExporter error: Invalid remoting connection ID: {guid}"); + Trace.TraceError($"RemoteExporter data connection error (Message={ex.Message}, ID={guid})"); + client.Dispose(); } } - catch (Exception ex) - { - Trace.TraceError($"RemoteExporter data connection error (Message={ex.Message}, ID={guid})"); - client.Dispose(); - } + } + catch (SocketException se) + { + Trace.TraceError($"RemoteExporter data transport error (Message={se.Message})"); } } @@ -264,22 +286,21 @@ public void Connect() { try { + // check client version var buffer = new byte[128]; - var length = sizeof(short) + sizeof(long) + sizeof(long); // version, start ticks, end ticks - for (var i = 0; i < length;) - { - i += this.stream.Read(buffer, i, length - i); - } - + Transport.Read(buffer, sizeof(short), this.stream); var reader = new BufferReader(buffer); - - // check client version var version = reader.ReadInt16(); if (version != ProtocolVersion) { throw new IOException($"Unsupported remoting protocol version: {version}"); } + // get replay info + var length = sizeof(long) + sizeof(long); // start ticks, end ticks + Transport.Read(buffer, length, this.stream); + reader.Reset(); + // get replay interval var startTicks = reader.ReadInt64(); if (startTicks == -1) @@ -316,7 +337,6 @@ public void JoinBackground(ITransportClient client) double avgBytesPerSec = 0; var lastTime = DateTime.MinValue; var buffer = new byte[0]; - Envelope envelope; long envelopeSize; unsafe { @@ -327,7 +347,7 @@ public void JoinBackground(ITransportClient client) while (true) { - if (this.storeReader.MoveNext(out envelope)) + if (this.storeReader.MoveNext(out Envelope envelope)) { var length = this.storeReader.Read(ref buffer); this.exporter.Throttle.Reset(); diff --git a/Sources/Runtime/Microsoft.Psi/Remoting/RemoteImporter.cs b/Sources/Runtime/Microsoft.Psi/Remoting/RemoteImporter.cs index 0b7d548df..802b8d24c 100644 --- a/Sources/Runtime/Microsoft.Psi/Remoting/RemoteImporter.cs +++ b/Sources/Runtime/Microsoft.Psi/Remoting/RemoteImporter.cs @@ -11,66 +11,64 @@ namespace Microsoft.Psi.Remoting using Microsoft.Psi.Common; using Microsoft.Psi.Data; using Microsoft.Psi.Persistence; + using Microsoft.Psi.Serialization; /// /// Importer for remoting over network transport. 
/// public sealed class RemoteImporter : IDisposable { - private const long DateTimeNowWindow = 1000; - private readonly Func importerThunk; private readonly long replayEnd; private readonly string host; private readonly int port; private readonly bool allowSequenceRestart; - private readonly EventWaitHandle connected = new EventWaitHandle(false, EventResetMode.ManualReset); + private readonly EventWaitHandle connected = new (false, EventResetMode.ManualReset); + + private readonly bool replayRemoteLatestStart; // special replayStart of `DateTime.UtcNow` at exporter side + private readonly Dictionary lastSequenceIdPerStream = new (); private PsiStoreWriter storeWriter; - private bool replayRemoteLatestStart; // special replayStart of `DateTime.UtcNow` at exporter side private long replayStart; // advanced upon each message for restart - private Dictionary lastSequenceIdPerStream = new Dictionary(); - private Thread metaClientThread; - private Thread dataClientThread; private bool disposed; /// /// Initializes a new instance of the class. /// - /// Pipeline to which to attach. + /// The pipeline to add the component to. /// Time interval to be replayed from remote source. /// Remote host name. /// TCP port on which to connect (default 11411). /// Whether to allow sequence ID restarts upon connection loss/reacquire. public RemoteImporter(Pipeline pipeline, TimeInterval replay, string host, int port = RemoteExporter.DefaultPort, bool allowSequenceRestart = true) - : this(name => PsiStore.Open(pipeline, name, null), replay, false, host, port, $"RemoteImporter_{Guid.NewGuid().ToString()}", null, allowSequenceRestart) + : this(name => PsiStore.Open(pipeline, name, null), replay, false, host, port, $"RemoteImporter_{Guid.NewGuid()}", null, allowSequenceRestart) { } /// /// Initializes a new instance of the class. /// - /// Pipeline to which to attach. + /// The pipeline to add the component to. /// End of time interval to be replayed from remote. /// Remote host name. /// TCP port on which to connect (default 11411). /// Whether to allow sequence ID restarts upon connection loss/reacquire. /// In this case the start is a special behavior that is `DateTime.UtcNow` _at the sending `RemoteExporter`_. public RemoteImporter(Pipeline pipeline, DateTime replayEnd, string host, int port = RemoteExporter.DefaultPort, bool allowSequenceRestart = true) - : this(name => PsiStore.Open(pipeline, name, null), new TimeInterval(DateTime.MinValue, replayEnd), true, host, port, $"RemoteImporter_{Guid.NewGuid().ToString()}", null, allowSequenceRestart) + : this(name => PsiStore.Open(pipeline, name, null), new TimeInterval(DateTime.MinValue, replayEnd), true, host, port, $"RemoteImporter_{Guid.NewGuid()}", null, allowSequenceRestart) { } /// /// Initializes a new instance of the class. /// - /// Pipeline to which to attach. + /// The pipeline to add the component to. /// Remote host name. /// TCP port on which to connect (default 11411). /// Whether to allow sequence ID restarts upon connection loss/reacquire. /// In this case the start is a special behavior that is `DateTime.UtcNow` _at the sending `RemoteExporter`_. 
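Note: for completeness, a sketch of the data-remoting pair these changes touch, with RemoteExporter serving a live store and RemoteImporter mirroring it on another machine. The Write operator, TransportKind.Tcp, and the Importer/OpenStream members come from the wider \psi API rather than this diff, and the host and stream names are assumptions:

```csharp
using System;
using Microsoft.Psi;
using Microsoft.Psi.Remoting;

class RemotingSketch
{
    static void Main(string[] args)
    {
        using var pipeline = Pipeline.Create();

        if (args.Length > 0 && args[0] == "export")
        {
            // Exporting side: write a stream into the exporter's store and serve it (default port 11411).
            var remoteExporter = new RemoteExporter(pipeline, transport: TransportKind.Tcp);
            Generators.Repeat(pipeline, 0.0, TimeSpan.FromMilliseconds(100)).Write("Values", remoteExporter.Exporter);
            pipeline.Run();
        }
        else
        {
            // Importing side: connect, wait for the stream metadata, then open the mirrored stream.
            var remoteImporter = new RemoteImporter(pipeline, "exporter-host");
            remoteImporter.Connected.WaitOne();
            remoteImporter.Importer.OpenStream<double>("Values").Do(v => Console.WriteLine(v));
            pipeline.Run();
        }
    }
}
```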
public RemoteImporter(Pipeline pipeline, string host, int port = RemoteExporter.DefaultPort, bool allowSequenceRestart = true) - : this(name => PsiStore.Open(pipeline, name, null), new TimeInterval(DateTime.MinValue, DateTime.MaxValue), true, host, port, $"RemoteImporter_{Guid.NewGuid().ToString()}", null, allowSequenceRestart) + : this(name => PsiStore.Open(pipeline, name, null), new TimeInterval(DateTime.MinValue, DateTime.MaxValue), true, host, port, $"RemoteImporter_{Guid.NewGuid()}", null, allowSequenceRestart) { } @@ -147,8 +145,6 @@ public EventWaitHandle Connected public void Dispose() { this.disposed = true; - this.metaClientThread = null; - this.dataClientThread = null; this.storeWriter.Dispose(); this.storeWriter = null; @@ -167,7 +163,6 @@ private void StartMetaClient() this.connected.Reset(); var thread = new Thread(new ThreadStart(this.MetaClientBackground)) { IsBackground = true }; thread.Start(); - this.metaClientThread = thread; // assign only after successful start in case of abort by data thread } private void MetaClientBackground() @@ -198,7 +193,7 @@ private void MetaClientBackground() var transport = Transport.TransportOfName(reader.ReadString()); transport.ReadTransportParams(reader); guid = new Guid(id); - Trace.WriteLine($"RemoteImporter meta client connected (ID={guid})"); + Trace.WriteLine($"{nameof(RemoteImporter)} meta client connected (ID={guid})"); // process metadata updates while (!this.disposed) @@ -219,15 +214,23 @@ private void MetaClientBackground() } catch (Exception ex) { - Trace.TraceError($"RemoteImporter meta update duplicate stream - expected after reconnect (Name={meta.Name}, ID={guid}, Error={ex.Message})"); + Trace.TraceError($"{nameof(RemoteImporter)} meta update duplicate stream - expected after reconnect (Name={meta.Name}, ID={guid}, Error={ex.Message})"); } } + else if (meta.Kind == MetadataKind.RuntimeInfo) + { + this.storeWriter.WriteToCatalog((RuntimeInfo)meta); + } + else if (meta.Kind == MetadataKind.TypeSchema) + { + this.storeWriter.WriteToCatalog((TypeSchema)meta); + } else { - this.storeWriter.WriteToCatalog(meta); + throw new NotSupportedException("Unknown metadata kind."); } - Trace.WriteLine($"RemoteImporter meta update (Name={meta.Name}, ID={guid})"); + Trace.WriteLine($"{nameof(RemoteImporter)} meta update (Name={meta.Name}, ID={guid})"); } else { @@ -239,9 +242,13 @@ private void MetaClientBackground() } } } + catch (ArgumentException) + { + throw; + } catch (Exception ex) { - Trace.WriteLine($"RemoteImporter meta connection error (Message={ex.Message}, ID={guid})"); + Trace.WriteLine($"{nameof(RemoteImporter)} meta connection error (Message={ex.Message}, ID={guid})"); this.StartMetaClient(); // restart } } @@ -293,12 +300,11 @@ private void StartDataClient(Guid id, ITransport transport) } catch (Exception ex) { - Trace.WriteLine($"RemoteImporter data connection error (Message={ex.Message}, ID={id})"); + Trace.WriteLine($"{nameof(RemoteImporter)} data connection error (Message={ex.Message}, ID={id})"); dataClient.Dispose(); } })) { IsBackground = true }; thread.Start(); - this.dataClientThread = thread; } } } diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/ArraySerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/ArraySerializer.cs index e6322d62a..d1ae80760 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/ArraySerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/ArraySerializer.cs @@ -16,6 +16,9 @@ internal sealed class ArraySerializer : ISerializer private SerializationHandler 
elementHandler; + /// + public bool? IsClearRequired => true; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { var type = typeof(T[]); @@ -72,7 +75,7 @@ public void Clear(ref T[] target, SerializationContext context) private void PrepareTarget(ref T[] target, int size, SerializationContext context) { - if (target != null && target.Length > size) + if (target != null && target.Length > size && (!this.elementHandler.IsClearRequired.HasValue || this.elementHandler.IsClearRequired.Value)) { // use a separate context to clear the unused objects, so that we don't corrupt the current context SerializationContext clearContext = new SerializationContext(context.Serializers); diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/BufferSerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/BufferSerializer.cs index 85aac8712..09483c0b9 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/BufferSerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/BufferSerializer.cs @@ -14,6 +14,9 @@ internal sealed class BufferSerializer : ISerializer { private const int Version = 2; + /// + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { serializers.GetHandler(); // register element type diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/ByteArraySerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/ByteArraySerializer.cs index 298149450..7a5922a3a 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/ByteArraySerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/ByteArraySerializer.cs @@ -17,6 +17,9 @@ internal sealed class ByteArraySerializer : ISerializer { private const int Version = 2; + /// + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { serializers.GetHandler(); // register element type diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/ClassSerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/ClassSerializer.cs index 48c4abb8e..11583a31b 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/ClassSerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/ClassSerializer.cs @@ -33,6 +33,9 @@ public ClassSerializer() { } + /// + public bool? IsClearRequired => null; // depends on the generated implementation + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { var runtimeSchema = TypeSchema.FromType(typeof(T), serializers.RuntimeVersion, this.GetType(), Version); diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/EnumerableSerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/EnumerableSerializer.cs index 400d76540..405cdbf26 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/EnumerableSerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/EnumerableSerializer.cs @@ -18,6 +18,9 @@ internal sealed class EnumerableSerializer : ISerializer> private SerializationHandler elementHandler; + /// + public bool? 
IsClearRequired => true; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { this.elementHandler = serializers.GetHandler(); // register element type @@ -84,7 +87,7 @@ public void Clear(ref IEnumerable target, SerializationContext context) private void PrepareTarget(ref T[] target, int size, SerializationContext context) { - if (target != null && target.Length > size) + if (target != null && target.Length > size && (!this.elementHandler.IsClearRequired.HasValue || this.elementHandler.IsClearRequired.Value)) { // use a separate context to clear the unused objects, so that we don't corrupt the current context SerializationContext clearContext = new SerializationContext(context.Serializers); diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/ISerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/ISerializer.cs index eb217653e..210ad7cf6 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/ISerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/ISerializer.cs @@ -21,6 +21,11 @@ namespace Microsoft.Psi.Serialization /// The type of objects the serializer knows how to handle. public interface ISerializer { + /// + /// Gets a value indicating whether cached instances must be cleared (null if statically unknown). + /// + bool? IsClearRequired { get; } + /// /// Initializes the serializer with the type schema and target object schema to use. /// diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/ImmutableSerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/ImmutableSerializer.cs index d70aa80dd..e9c1f796a 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/ImmutableSerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/ImmutableSerializer.cs @@ -20,6 +20,9 @@ public ImmutableSerializer() { } + /// + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { var runtimeSchema = TypeSchema.FromType(typeof(T), serializers.RuntimeVersion, this.GetType(), Version); diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/KnownSerializers.cs b/Sources/Runtime/Microsoft.Psi/Serialization/KnownSerializers.cs index 593fcd7de..debf7def9 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/KnownSerializers.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/KnownSerializers.cs @@ -216,7 +216,7 @@ private KnownSerializers(bool isDefault, RuntimeInfo runtimeVersion) /// Optional flags that control the cloning behavior for this type. public void Register(Type type, string contractName, CloningFlags cloningFlags = CloningFlags.None) { - contractName = contractName ?? 
TypeSchema.GetContractName(type, this.runtimeVersion); + contractName ??= TypeSchema.GetContractName(type, this.runtimeVersion); if (this.knownTypes.TryGetValue(contractName, out Type existingType) && existingType != type) { throw new SerializationException($"Cannot register type {type.AssemblyQualifiedName} under the contract name {contractName} because the type {existingType.AssemblyQualifiedName} is already registered under the same name."); @@ -317,7 +317,7 @@ internal void RegisterSchema(TypeSchema schema) return; } - if (schema.IsPartial && this.schemasById.TryGetValue(id, out TypeSchema otherSchema)) + if (schema.IsPartial && this.schemasById.TryGetValue(id, out _)) { // schema is already registered return; @@ -448,14 +448,13 @@ private SerializationHandler AddHandler() SerializationHandler handler = null; var type = typeof(T); ISerializer serializer = null; - TypeSchema schema = null; if (!this.knownNames.TryGetValue(type, out string name)) { name = TypeSchema.GetContractName(type, this.runtimeVersion); } - if (!this.schemas.TryGetValue(name, out schema)) + if (!this.schemas.TryGetValue(name, out TypeSchema schema)) { // try to match to an existing schema without assembly/version info string typeName = TypeResolutionHelper.RemoveAssemblyName(type.AssemblyQualifiedName); @@ -540,9 +539,8 @@ private SerializationHandler AddHandler() private ISerializer CreateSerializer() { var type = typeof(T); - Type serializerType; - if (this.serializers.TryGetValue(type, out serializerType)) + if (this.serializers.TryGetValue(type, out Type serializerType)) { return (ISerializer)Activator.CreateInstance(serializerType); } diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/MemoryStreamSerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/MemoryStreamSerializer.cs index 112e43f1d..9b77d5857 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/MemoryStreamSerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/MemoryStreamSerializer.cs @@ -15,6 +15,9 @@ internal sealed class MemoryStreamSerializer : ISerializer private const int SchemaVersion = 3; private ISerializer innerSerializer; + /// + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { if (targetSchema?.Version <= 2) @@ -73,6 +76,9 @@ public void Clear(ref MemoryStream target, SerializationContext context) /// private class MemoryStreamSerializerImpl : ISerializer { + /// + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { var schemaMembers = new[] { new TypeMemberSchema("buffer", typeof(byte[]).AssemblyQualifiedName, true) }; diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/NonSerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/NonSerializer.cs index c584351d4..7e482b24b 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/NonSerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/NonSerializer.cs @@ -14,35 +14,45 @@ internal class NonSerializer : ISerializer { private const int Version = 0; + /// + public bool? 
IsClearRequired => false; + + /// public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { return null; } + /// public void Clone(T instance, ref T target, SerializationContext context) { } + /// public void Serialize(BufferWriter writer, T instance, SerializationContext context) { throw new NotSupportedException($"Serialization is not supported for type: {typeof(T).AssemblyQualifiedName}"); } + /// public void Deserialize(BufferReader reader, ref T target, SerializationContext context) { throw new NotSupportedException($"Deserialization is not supported for type: {typeof(T).AssemblyQualifiedName}"); } + /// public void PrepareDeserializationTarget(BufferReader reader, ref T target, SerializationContext context) { throw new NotSupportedException($"Deserialization is not supported for type: {typeof(T).AssemblyQualifiedName}"); } + /// public void PrepareCloningTarget(T instance, ref T target, SerializationContext context) { target = instance; } + /// public void Clear(ref T target, SerializationContext context) { } diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/RefHandler.cs b/Sources/Runtime/Microsoft.Psi/Serialization/RefHandler.cs index 41575138d..54230fa36 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/RefHandler.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/RefHandler.cs @@ -24,6 +24,10 @@ public RefHandler(ISerializer innerSerializer, string contractName, int id) : base(contractName, id) { this.innerSerializer = innerSerializer; + if (innerSerializer != null && innerSerializer.IsClearRequired.HasValue) + { + this.IsClearRequired = innerSerializer.IsClearRequired.Value; + } if (typeof(T).IsValueType) { diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/SimpleArraySerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/SimpleArraySerializer.cs index df0266910..f9b8396cf 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/SimpleArraySerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/SimpleArraySerializer.cs @@ -23,6 +23,9 @@ internal sealed class SimpleArraySerializer : ISerializer private static readonly SerializeDelegate SerializeFn = Generator.GenerateSerializeMethod(il => Generator.EmitPrimitiveArraySerialize(typeof(T), il)); private static readonly DeserializeDelegate DeserializeFn = Generator.GenerateDeserializeMethod(il => Generator.EmitPrimitiveArrayDeserialize(typeof(T), il)); + /// + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { serializers.GetHandler(); // register element type diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/SimpleSerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/SimpleSerializer.cs index eef0a21b4..be1c93e7c 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/SimpleSerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/SimpleSerializer.cs @@ -15,6 +15,9 @@ internal sealed class SimpleSerializer : ISerializer private SerializeDelegate serializeImpl; private DeserializeDelegate deserializeImpl; + /// + public bool? 
IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { var schema = TypeSchema.FromType(typeof(T), serializers.RuntimeVersion, this.GetType(), Version); diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/StringArraySerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/StringArraySerializer.cs index 1787175e2..7a77b1a74 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/StringArraySerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/StringArraySerializer.cs @@ -13,6 +13,9 @@ internal sealed class StringArraySerializer : ISerializer { private const int Version = 2; + /// + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { serializers.GetHandler(); // register element type diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/StringSerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/StringSerializer.cs index 07ad9acec..5c0e6cd5f 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/StringSerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/StringSerializer.cs @@ -12,6 +12,9 @@ internal sealed class StringSerializer : ISerializer { private const int Version = 0; + /// + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { return targetSchema ?? TypeSchema.FromType(typeof(string), serializers.RuntimeVersion, this.GetType(), Version); diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/StructHandler.cs b/Sources/Runtime/Microsoft.Psi/Serialization/StructHandler.cs index 3a15d6ca5..b262eca8b 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/StructHandler.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/StructHandler.cs @@ -24,6 +24,10 @@ public StructHandler(ISerializer innerSerializer, string contractName, int id : base(contractName, id) { this.innerSerializer = innerSerializer; + if (innerSerializer != null && innerSerializer.IsClearRequired.HasValue) + { + this.IsClearRequired = innerSerializer.IsClearRequired.Value; + } if (typeof(T).IsByRef) { diff --git a/Sources/Runtime/Microsoft.Psi/Serialization/StructSerializer.cs b/Sources/Runtime/Microsoft.Psi/Serialization/StructSerializer.cs index 2445778f5..412e5ebfc 100644 --- a/Sources/Runtime/Microsoft.Psi/Serialization/StructSerializer.cs +++ b/Sources/Runtime/Microsoft.Psi/Serialization/StructSerializer.cs @@ -21,6 +21,9 @@ internal sealed class StructSerializer : ISerializer private CloneDelegate cloneImpl; private ClearDelegate clearImpl; + /// + public bool? 
IsClearRequired => null; // depends on the generated implementation + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { var runtimeSchema = TypeSchema.FromType(typeof(T), serializers.RuntimeVersion, this.GetType(), Version); diff --git a/Sources/Runtime/Microsoft.Psi/Streams/Emitter{T}.cs b/Sources/Runtime/Microsoft.Psi/Streams/Emitter{T}.cs index 6a7ac7cd3..75ab36cf6 100644 --- a/Sources/Runtime/Microsoft.Psi/Streams/Emitter{T}.cs +++ b/Sources/Runtime/Microsoft.Psi/Streams/Emitter{T}.cs @@ -23,12 +23,12 @@ public sealed class Emitter : IEmitter, IProducer private readonly object owner; private readonly Pipeline pipeline; private readonly int id; - private readonly List closedHandlers = new List(); + private readonly List closedHandlers = new (); private readonly ValidateMessageHandler messageValidator; - private readonly object receiversLock = new object(); + private readonly object receiversLock = new (); + private readonly SynchronizationLock syncContext; private string name; private int nextSeqId; - private SynchronizationLock syncContext; private Envelope lastEnvelope; private volatile Receiver[] receivers = new Receiver[0]; private IPerfCounterCollection counters; @@ -128,7 +128,7 @@ public void Close(DateTime originatingTime) { var e = this.CreateEnvelope(originatingTime); e.SequenceId = int.MaxValue; // special "closing" ID - this.Deliver(new Message(default(T), e)); + this.Deliver(new Message(default, e)); lock (this.receiversLock) { @@ -235,12 +235,30 @@ private void Validate(T data, Envelope e) // make sure the data is consistent if (e.SequenceId <= this.lastEnvelope.SequenceId) { - throw new InvalidOperationException($"Attempted to post a message with a sequence ID that is out of order: {this.Name}\nThis may be caused by simultaneous calls to Emitter.Post() from multiple threads."); + throw new InvalidOperationException( + $"Attempted to post a message with a sequence ID that is out of order.\n" + + $"This may be caused by simultaneous calls to Emitter.Post() from multiple threads.\n" + + $"Emitter: {this.Name}\n" + + $"Current message sequence ID: {e.SequenceId}\n" + + $"Previous message sequence ID: {this.lastEnvelope.SequenceId}\n"); } - if (e.OriginatingTime <= this.lastEnvelope.OriginatingTime || e.CreationTime < this.lastEnvelope.CreationTime) + if (e.OriginatingTime <= this.lastEnvelope.OriginatingTime) { - throw new InvalidOperationException($"Attempted to post a message without strictly increasing originating time or that is out of order in wall-clock time: {this.Name}"); + throw new InvalidOperationException( + $"Attempted to post a message without strictly increasing originating times.\n" + + $"Emitter: {this.Name}\n" + + $"Current message originating time: {e.OriginatingTime.TimeOfDay}\n" + + $"Previous message originating time: {this.lastEnvelope.OriginatingTime.TimeOfDay}\n"); + } + + if (e.CreationTime < this.lastEnvelope.CreationTime) + { + throw new InvalidOperationException( + $"Attempted to post a message that is out of order in wall-clock time.\n" + + $"Emitter: {this.Name}\n" + + $"Current message creation time: {e.CreationTime.TimeOfDay}\n" + + $"Previous message creation time: {this.lastEnvelope.CreationTime.TimeOfDay}\n"); } // additional message validation checks diff --git a/Sources/Runtime/Test.Psi/CustomSerializationTester.cs b/Sources/Runtime/Test.Psi/CustomSerializationTester.cs index 68bde3cb7..03bf060e5 100644 --- a/Sources/Runtime/Test.Psi/CustomSerializationTester.cs +++ 
b/Sources/Runtime/Test.Psi/CustomSerializationTester.cs @@ -63,6 +63,8 @@ public class TestCustomSerializer : ISerializer { public int Version => throw new NotSupportedException(); + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { return null; diff --git a/Sources/Runtime/Test.Psi/EmitterTester.cs b/Sources/Runtime/Test.Psi/EmitterTester.cs index 93fdef642..af2becc3e 100644 --- a/Sources/Runtime/Test.Psi/EmitterTester.cs +++ b/Sources/Runtime/Test.Psi/EmitterTester.cs @@ -167,6 +167,78 @@ public void ValidateMessages() } } + [TestMethod] + [Timeout(60000)] + public void DeliverOutOfOrderSequenceIdsShouldThrow() + { + var exceptionThrown = false; + try + { + using (var p = Pipeline.Create()) + { + var time = DateTime.UtcNow; + var emitter = p.CreateEmitter(this, "test"); + emitter.Deliver(123, new Envelope(time, time, emitter.Id, 2)); + emitter.Deliver(456, new Envelope(time.AddTicks(1), time.AddTicks(1), emitter.Id, 1)); // this should fail (posting with out of order sequence ID) + } + } + catch (InvalidOperationException ex) + { + exceptionThrown = true; + Assert.IsTrue(ex.Message.StartsWith("Attempted to post a message with a sequence ID that is out of order")); + } + + Assert.IsTrue(exceptionThrown); + } + + [TestMethod] + [Timeout(60000)] + public void DeliverOutOfOrderOriginatingTimesShouldThrow() + { + var exceptionThrown = false; + try + { + using (var p = Pipeline.Create()) + { + var time = DateTime.UtcNow; + var emitter = p.CreateEmitter(this, "test"); + emitter.Deliver(123, new Envelope(time, time, emitter.Id, 2)); + emitter.Deliver(456, new Envelope(time, time.AddTicks(1), emitter.Id, 3)); // this should fail (posting with non-increasing originating times) + } + } + catch (InvalidOperationException ex) + { + exceptionThrown = true; + Assert.IsTrue(ex.Message.StartsWith("Attempted to post a message without strictly increasing originating times")); + } + + Assert.IsTrue(exceptionThrown); + } + + [TestMethod] + [Timeout(60000)] + public void DeliverOutOfOrderCreationTimesShouldThrow() + { + var exceptionThrown = false; + try + { + using (var p = Pipeline.Create()) + { + var time = DateTime.UtcNow; + var emitter = p.CreateEmitter(this, "test"); + emitter.Deliver(123, new Envelope(time, time, emitter.Id, 2)); + emitter.Deliver(456, new Envelope(time.AddTicks(1), time.AddTicks(-1), emitter.Id, 3)); // this should fail (posting with out of order creation times) + } + } + catch (InvalidOperationException ex) + { + exceptionThrown = true; + Assert.IsTrue(ex.Message.StartsWith("Attempted to post a message that is out of order in wall-clock time")); + } + + Assert.IsTrue(exceptionThrown); + } + #if DEBUG [TestMethod] [Timeout(60000)] diff --git a/Sources/Runtime/Test.Psi/FunctionalTests.cs b/Sources/Runtime/Test.Psi/FunctionalTests.cs index 30565bbca..cd3a0c334 100644 --- a/Sources/Runtime/Test.Psi/FunctionalTests.cs +++ b/Sources/Runtime/Test.Psi/FunctionalTests.cs @@ -406,6 +406,8 @@ private class CustomSerializer : ISerializer> { public int Version => 1; + public bool? 
IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { return null; diff --git a/Sources/Runtime/Test.Psi/JoinTests.cs b/Sources/Runtime/Test.Psi/JoinTests.cs index 4e7b05d66..6fd1f8d58 100644 --- a/Sources/Runtime/Test.Psi/JoinTests.cs +++ b/Sources/Runtime/Test.Psi/JoinTests.cs @@ -17,43 +17,42 @@ public class JoinTests [Timeout(60000)] public void JoinClosingSecondary() { - using (var p = Pipeline.Create()) - { - // primary 0 1 2 3 4 5 6 7 8 9 - // secondary 0 1 2 3 4 5 6 7 8 9 - // joined (0,0) (1,2) (2,4) (3,6) (4,8) (5,9) (6,9) (7,9) (8,9) (9,9) - // ^ ^ ^ ^ ^ - // note: normally these would remain unpaired - // until seeing next secondary message - var primary = Generators.Range(p, 0, 10, TimeSpan.FromMilliseconds(100)); - var secondary = Generators.Range(p, 0, 10, TimeSpan.FromMilliseconds(50)); - var joined = primary.Join(secondary, RelativeTimeInterval.Infinite); - var results = joined.Select(x => $"{x.Item1},{x.Item2}").ToObservable().ToListObservable(); - p.Run(); - - Assert.IsTrue(Enumerable.SequenceEqual(new[] { "0,0", "1,2", "2,4", "3,6", "4,8", "5,9", "6,9", "7,9", "8,9", "9,9" }, results)); - } + using var p = Pipeline.Create(); + + // primary 0 1 2 3 4 5 6 7 8 9 + // secondary 0 1 2 3 4 5 6 7 8 9 + // joined (0,0) (1,2) (2,4) (3,6) (4,8) (5,9) (6,9) (7,9) (8,9) (9,9) + // ^ ^ ^ ^ ^ + // note: normally these would remain unpaired + // until seeing next secondary message + var primary = Generators.Range(p, 0, 10, TimeSpan.FromMilliseconds(100)); + var secondary = Generators.Range(p, 0, 10, TimeSpan.FromMilliseconds(50)); + var joined = primary.Join(secondary, RelativeTimeInterval.Infinite); + var results = joined.Select(x => $"{x.Item1},{x.Item2}").ToObservable().ToListObservable(); + p.Run(); + + Assert.IsTrue(Enumerable.SequenceEqual(new[] { "0,0", "1,2", "2,4", "3,6", "4,8", "5,9", "6,9", "7,9", "8,9", "9,9" }, results)); } [TestMethod] [Timeout(60000)] public void DynamicJoinClosingSecondaryOrDefault() { - using (var p = Pipeline.Create()) - { - // Setup a sequence with a parallel operator, with outputDefaultIfDropped = true, and ensure that - // the "outputDefaultIfDropped" is correctly applied while the instance substream exists, but not outside of that existance. - // This tests for making sure we are correctly tracking stream closings and the interpolator - // in Join is doing the right thing based on the stream closing times. - - // key N/A 1 1 1 1 1 1 N/A N/A N/A - // value N/A 1 2 3 4 5 6 N/A N/A N/A - // gamma-result [1 2 - 4 - -] - // out 1 2 0 4 0 0 - var input = Generators.Sequence( - p, - new List>() - { + using var p = Pipeline.Create(); + + // Setup a sequence with a parallel operator, with outputDefaultIfDropped = true, and ensure that + // the "outputDefaultIfDropped" is correctly applied while the instance substream exists, but not outside of that existance. + // This tests for making sure we are correctly tracking stream closings and the interpolator + // in Join is doing the right thing based on the stream closing times. 
+ + // key N/A 1 1 1 1 1 1 N/A N/A N/A + // value N/A 1 2 3 4 5 6 N/A N/A N/A + // gamma-result [1 2 - 4 - -] + // out 1 2 0 4 0 0 + var input = Generators.Sequence( + p, + new List>() + { new Dictionary(), new Dictionary() { { 1, 1 } }, new Dictionary() { { 1, 2 } }, @@ -64,32 +63,31 @@ public void DynamicJoinClosingSecondaryOrDefault() new Dictionary(), new Dictionary(), new Dictionary(), - }, - TimeSpan.FromTicks(1)); + }, + TimeSpan.FromTicks(1)); - var resultsParallelOrDefault = new List(); - input.Parallel(s => s.Where(x => x != 3 && x <= 4), outputDefaultIfDropped: true).Do(d => + var resultsParallelOrDefault = new List(); + input.Parallel(s => s.Where(x => x != 3 && x <= 4), outputDefaultIfDropped: true).Do(d => + { + if (d.Count() > 0) { - if (d.Count() > 0) - { - resultsParallelOrDefault.Add(d[1]); - } - }); + resultsParallelOrDefault.Add(d[1]); + } + }); - var resultsParallel = new List(); - input.Parallel(s => s.Where(x => x != 3 && x <= 4)).Do(d => + var resultsParallel = new List(); + input.Parallel(s => s.Where(x => x != 3 && x <= 4)).Do(d => + { + if (d.Count() > 0) { - if (d.Count() > 0) - { - resultsParallel.Add(d[1]); - } - }); + resultsParallel.Add(d[1]); + } + }); - p.Run(); + p.Run(); - Assert.IsTrue(Enumerable.SequenceEqual(resultsParallel, new[] { 1, 2, 4 })); - Assert.IsTrue(Enumerable.SequenceEqual(resultsParallelOrDefault, new[] { 1, 2, 0, 4, 0, 0 })); - } + Assert.IsTrue(Enumerable.SequenceEqual(resultsParallel, new[] { 1, 2, 4 })); + Assert.IsTrue(Enumerable.SequenceEqual(resultsParallelOrDefault, new[] { 1, 2, 0, 4, 0, 0 })); } [TestMethod] @@ -186,71 +184,36 @@ public void VectorJoinWithArityOne() } } - [TestMethod] - [Timeout(60000)] - public void SparseJoin() - { - var results = new List>(); - - using (var p = Pipeline.Create()) - { - var sourceA = Generators.Sequence(p, 100, i => i + 1, 30, TimeSpan.FromTicks(10)); - var sourceB = Generators.Sequence(p, 100, i => i + 1, 10, TimeSpan.FromTicks(30)); - var sourceC = Generators.Sequence(p, 100, i => i + 1, 3, TimeSpan.FromTicks(100)); - var keyMapping = sourceA.Select(i => (i % 10 != 0) ? 
new Dictionary { { "zero", 0 }, { "one", 1 } } : new Dictionary { { "zero", 0 }, { "two", 2 } }); - - Operators - .Join(keyMapping, new[] { sourceA, sourceB, sourceC }, Reproducible.Nearest(TimeSpan.FromTicks(5))) - .Do(t => results.Add(t.DeepClone())); - p.Run(new ReplayDescriptor(DateTime.UtcNow, DateTime.MaxValue)); - } - - Assert.AreEqual(12, results.Count); - CollectionAssert.AreEqual(new Dictionary { { "zero", 100 }, { "two", 100 } }, results[0]); - CollectionAssert.AreEqual(new Dictionary { { "zero", 103 }, { "one", 101 } }, results[1]); - CollectionAssert.AreEqual(new Dictionary { { "zero", 106 }, { "one", 102 } }, results[2]); - CollectionAssert.AreEqual(new Dictionary { { "zero", 109 }, { "one", 103 } }, results[3]); - CollectionAssert.AreEqual(new Dictionary { { "zero", 110 }, { "two", 101 } }, results[4]); - CollectionAssert.AreEqual(new Dictionary { { "zero", 112 }, { "one", 104 } }, results[5]); - CollectionAssert.AreEqual(new Dictionary { { "zero", 115 }, { "one", 105 } }, results[6]); - CollectionAssert.AreEqual(new Dictionary { { "zero", 118 }, { "one", 106 } }, results[7]); - CollectionAssert.AreEqual(new Dictionary { { "zero", 120 }, { "two", 102 } }, results[8]); - CollectionAssert.AreEqual(new Dictionary { { "zero", 121 }, { "one", 107 } }, results[9]); - CollectionAssert.AreEqual(new Dictionary { { "zero", 124 }, { "one", 108 } }, results[10]); - CollectionAssert.AreEqual(new Dictionary { { "zero", 127 }, { "one", 109 } }, results[11]); - } - [TestMethod] [Timeout(60000)] public void TupleCollapsingJoin() { - using (var pipeline = Pipeline.Create()) - { - var range = Generators.Range(pipeline, 0, 10, TimeSpan.FromMilliseconds(10)); - var sourceA = range.Select(x => $"A{x}"); - var sourceB = range.Select(x => $"B{x}"); - var sourceC = range.Select(x => $"C{x}"); - var sourceD = range.Select(x => $"D{x}"); - var sourceE = range.Select(x => $"E{x}"); - var sourceF = range.Select(x => $"F{x}"); - var sourceG = range.Select(x => $"G{x}"); - - var tuples = - sourceA - .Join(sourceB, Reproducible.Nearest()) - .Join(sourceC, Reproducible.Nearest()) - .Join(sourceD, Reproducible.Nearest()) - .Join(sourceE, Reproducible.Nearest()) - .Join(sourceF, Reproducible.Nearest()) - .Join(sourceG, Reproducible.Nearest()) - .ToObservable().ToListObservable(); - pipeline.Run(); - - var results = tuples.AsEnumerable().ToArray(); - - Assert.IsTrue(Enumerable.SequenceEqual( - new ValueTuple[] - { + using var pipeline = Pipeline.Create(); + var range = Generators.Range(pipeline, 0, 10, TimeSpan.FromMilliseconds(10)); + var sourceA = range.Select(x => $"A{x}"); + var sourceB = range.Select(x => $"B{x}"); + var sourceC = range.Select(x => $"C{x}"); + var sourceD = range.Select(x => $"D{x}"); + var sourceE = range.Select(x => $"E{x}"); + var sourceF = range.Select(x => $"F{x}"); + var sourceG = range.Select(x => $"G{x}"); + + var tuples = + sourceA + .Join(sourceB, Reproducible.Nearest()) + .Join(sourceC, Reproducible.Nearest()) + .Join(sourceD, Reproducible.Nearest()) + .Join(sourceE, Reproducible.Nearest()) + .Join(sourceF, Reproducible.Nearest()) + .Join(sourceG, Reproducible.Nearest()) + .ToObservable().ToListObservable(); + pipeline.Run(); + + var results = tuples.AsEnumerable().ToArray(); + + Assert.IsTrue(Enumerable.SequenceEqual( + new ValueTuple[] + { ValueTuple.Create("A0", "B0", "C0", "D0", "E0", "F0", "G0"), ValueTuple.Create("A1", "B1", "C1", "D1", "E1", "F1", "G1"), ValueTuple.Create("A2", "B2", "C2", "D2", "E2", "F2", "G2"), @@ -261,40 +224,38 @@ public void TupleCollapsingJoin() 
ValueTuple.Create("A7", "B7", "C7", "D7", "E7", "F7", "G7"), ValueTuple.Create("A8", "B8", "C8", "D8", "E8", "F8", "G8"), ValueTuple.Create("A9", "B9", "C9", "D9", "E9", "F9", "G9"), - }, - results)); - } + }, + results)); } [TestMethod] [Timeout(60000)] public void TupleCollapsingReversedJoin() { - using (var pipeline = Pipeline.Create()) - { - var range = Generators.Range(pipeline, 0, 10, TimeSpan.FromMilliseconds(10)); - var sourceA = range.Select(x => $"A{x}"); - var sourceB = range.Select(x => $"B{x}"); - var sourceC = range.Select(x => $"C{x}"); - var sourceD = range.Select(x => $"D{x}"); - var sourceE = range.Select(x => $"E{x}"); - var sourceF = range.Select(x => $"F{x}"); - var sourceG = range.Select(x => $"G{x}"); - - var tuplesFG = sourceF.Join(sourceG); - var tuplesEFG = sourceE.Join(tuplesFG); - var tuplesDEFG = sourceD.Join(tuplesEFG); - var tuplesCDEFG = sourceC.Join(tuplesDEFG); - var tuplesBCDEFG = sourceB.Join(tuplesCDEFG); - var tuplesABCDEFG = sourceA.Join(tuplesBCDEFG); - var tuples = tuplesABCDEFG.ToObservable().ToListObservable(); - pipeline.Run(); - - var results = tuples.AsEnumerable().ToArray(); - - Assert.IsTrue(Enumerable.SequenceEqual( - new ValueTuple[] - { + using var pipeline = Pipeline.Create(); + var range = Generators.Range(pipeline, 0, 10, TimeSpan.FromMilliseconds(10)); + var sourceA = range.Select(x => $"A{x}"); + var sourceB = range.Select(x => $"B{x}"); + var sourceC = range.Select(x => $"C{x}"); + var sourceD = range.Select(x => $"D{x}"); + var sourceE = range.Select(x => $"E{x}"); + var sourceF = range.Select(x => $"F{x}"); + var sourceG = range.Select(x => $"G{x}"); + + var tuplesFG = sourceF.Join(sourceG); + var tuplesEFG = sourceE.Join(tuplesFG); + var tuplesDEFG = sourceD.Join(tuplesEFG); + var tuplesCDEFG = sourceC.Join(tuplesDEFG); + var tuplesBCDEFG = sourceB.Join(tuplesCDEFG); + var tuplesABCDEFG = sourceA.Join(tuplesBCDEFG); + var tuples = tuplesABCDEFG.ToObservable().ToListObservable(); + pipeline.Run(); + + var results = tuples.AsEnumerable().ToArray(); + + Assert.IsTrue(Enumerable.SequenceEqual( + new ValueTuple[] + { ValueTuple.Create("A0", "B0", "C0", "D0", "E0", "F0", "G0"), ValueTuple.Create("A1", "B1", "C1", "D1", "E1", "F1", "G1"), ValueTuple.Create("A2", "B2", "C2", "D2", "E2", "F2", "G2"), @@ -305,9 +266,8 @@ public void TupleCollapsingReversedJoin() ValueTuple.Create("A7", "B7", "C7", "D7", "E7", "F7", "G7"), ValueTuple.Create("A8", "B8", "C8", "D8", "E8", "F8", "G8"), ValueTuple.Create("A9", "B9", "C9", "D9", "E9", "F9", "G9"), - }, - results)); - } + }, + results)); } } } diff --git a/Sources/Runtime/Test.Psi/OperatorTests.cs b/Sources/Runtime/Test.Psi/OperatorTests.cs index 172d2ba19..5cce288d1 100644 --- a/Sources/Runtime/Test.Psi/OperatorTests.cs +++ b/Sources/Runtime/Test.Psi/OperatorTests.cs @@ -1331,19 +1331,34 @@ public void ZipAndMerge() [Timeout(60000)] public void FirstOperator() { - using (var p = Pipeline.Create()) - { - var source = Generators.Range(p, 0, 10, TimeSpan.FromMilliseconds(10)); - var first1 = source.First().ToObservable().ToListObservable(); - var first5 = source.First(5).ToObservable().ToListObservable(); - var firstN = source.First(int.MaxValue).ToObservable().ToListObservable(); + using var p = Pipeline.Create(); + var source = Generators.Range(p, 0, 10, TimeSpan.FromMilliseconds(10)); + var first1 = source.First().ToObservable().ToListObservable(); + var first5 = source.First(5).ToObservable().ToListObservable(); + var firstN = source.First(int.MaxValue).ToObservable().ToListObservable(); - 
p.Run(); + p.Run(); - CollectionAssert.AreEqual(new[] { 0 }, first1.AsEnumerable().ToArray()); - CollectionAssert.AreEqual(new[] { 0, 1, 2, 3, 4 }, first5.AsEnumerable().ToArray()); - CollectionAssert.AreEqual(new[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, firstN.AsEnumerable().ToArray()); - } + CollectionAssert.AreEqual(new[] { 0 }, first1.AsEnumerable().ToArray()); + CollectionAssert.AreEqual(new[] { 0, 1, 2, 3, 4 }, first5.AsEnumerable().ToArray()); + CollectionAssert.AreEqual(new[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, firstN.AsEnumerable().ToArray()); + } + + [TestMethod] + [Timeout(60000)] + public void LastOperator() + { + using var p = Pipeline.Create(); + var source = Generators.Range(p, 0, 10, TimeSpan.FromMilliseconds(10)); + var last1 = source.Last().ToObservable().ToListObservable(); + var last5 = source.Last(5).ToObservable().ToListObservable(); + var lastN = source.Last(int.MaxValue).ToObservable().ToListObservable(); + + p.Run(); + + CollectionAssert.AreEqual(new[] { 9 }, last1.AsEnumerable().ToArray()); + CollectionAssert.AreEqual(new[] { 5, 6, 7, 8, 9 }, last5.AsEnumerable().ToArray()); + CollectionAssert.AreEqual(new[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, lastN.AsEnumerable().ToArray()); } private static DateTime SelectMiddleTimestamp(IEnumerable times) diff --git a/Sources/Runtime/Test.Psi/PairTests.cs b/Sources/Runtime/Test.Psi/PairTests.cs index 9005c0f99..cf3ad8db7 100644 --- a/Sources/Runtime/Test.Psi/PairTests.cs +++ b/Sources/Runtime/Test.Psi/PairTests.cs @@ -20,8 +20,8 @@ public void PairDelayedPrimary() using (var pipeline = Pipeline.Create()) { Generators.Range(pipeline, 0, 2, TimeSpan.FromSeconds(1)); // hold pipeline open - var primary = Generators.Range(pipeline, 0, 5, TimeSpan.FromTicks(1)).Delay(TimeSpan.FromMilliseconds(100)); var secondary = Generators.Range(pipeline, 0, 5, TimeSpan.FromTicks(1)); + var primary = secondary.Delay(TimeSpan.FromMilliseconds(100)); var paired = primary.Pair(secondary).ToObservable().ToListObservable(); var fused = primary.Fuse(secondary, Available.Last()).ToObservable().ToListObservable(); pipeline.Run(); @@ -42,7 +42,7 @@ public void PairDelayedSecondary() { Generators.Range(pipeline, 0, 2, TimeSpan.FromSeconds(1)); // hold pipeline open var primary = Generators.Range(pipeline, 0, 5, TimeSpan.FromTicks(1)); - var secondary = Generators.Range(pipeline, 0, 5, TimeSpan.FromTicks(1)).Delay(TimeSpan.FromMilliseconds(100)); + var secondary = primary.Delay(TimeSpan.FromMilliseconds(100)); var paired = primary.Pair(secondary).ToObservable().ToListObservable(); var fused = primary.Fuse(secondary, Available.Last()).ToObservable().ToListObservable(); pipeline.Run(); @@ -63,7 +63,7 @@ public void PairDelayedSecondaryWithInitialValue() { Generators.Range(pipeline, 0, 2, TimeSpan.FromSeconds(1)); // hold pipeline open var primary = Generators.Range(pipeline, 0, 5, TimeSpan.FromTicks(1)); - var secondary = Generators.Range(pipeline, 0, 5, TimeSpan.FromTicks(1)).Delay(TimeSpan.FromMilliseconds(100)); + var secondary = primary.Delay(TimeSpan.FromMilliseconds(100)); var paired = primary.Pair(secondary, 42).ToObservable().ToListObservable(); var fused = primary.Fuse(secondary, Available.LastOrDefault(42)).ToObservable().ToListObservable(); pipeline.Run(); @@ -83,8 +83,8 @@ public void PairDelayedPrimaryWithOutputCreator() using (var pipeline = Pipeline.Create()) { Generators.Range(pipeline, 0, 2, TimeSpan.FromSeconds(1)); // hold pipeline open - var primary = Generators.Range(pipeline, 0, 5, TimeSpan.FromTicks(1)).Delay(TimeSpan.FromMilliseconds(100)); var 
secondary = Generators.Range(pipeline, 0, 5, TimeSpan.FromTicks(1)); + var primary = secondary.Delay(TimeSpan.FromMilliseconds(100)); var paired = primary.Pair(secondary, (p, s) => p * 10 + s).ToObservable().ToListObservable(); var fused = primary.Fuse(secondary, Available.Last(), (p, s) => p * 10 + s).ToObservable().ToListObservable(); pipeline.Run(); @@ -105,7 +105,7 @@ public void PairDelayedSecondryWithInitialValueAndOutputCreator() { Generators.Range(pipeline, 0, 2, TimeSpan.FromSeconds(1)); // hold pipeline open var primary = Generators.Range(pipeline, 0, 5, TimeSpan.FromTicks(1)); - var secondary = Generators.Range(pipeline, 0, 5, TimeSpan.FromTicks(1)).Delay(TimeSpan.FromMilliseconds(100)); + var secondary = primary.Delay(TimeSpan.FromMilliseconds(100)); var paired = primary.Pair(secondary, (p, s) => p * 10 + s, 7).ToObservable().ToListObservable(); var fused = primary.Fuse(secondary, Available.LastOrDefault(7), (p, s) => p * 10 + s).ToObservable().ToListObservable(); pipeline.Run(); diff --git a/Sources/Runtime/Test.Psi/PersistenceTest.cs b/Sources/Runtime/Test.Psi/PersistenceTest.cs index 3953d0330..1973ca385 100644 --- a/Sources/Runtime/Test.Psi/PersistenceTest.cs +++ b/Sources/Runtime/Test.Psi/PersistenceTest.cs @@ -1147,11 +1147,16 @@ public void RepairInvalidStore() // since the store will be rendered valid when the pipeline is terminated. Directory.CreateDirectory(tempFolder); - // copy the store files to the temp folder - we will restore them later + // Copy the store files to the temp folder - we will restore them later. + // The Live marker file cannot be copied because the writer still holds + // an exclusive lock. foreach (var file in Directory.EnumerateFiles(invalidStore.Path)) { var fileInfo = new FileInfo(file); - File.Copy(file, Path.Combine(tempFolder, fileInfo.Name)); + if (!fileInfo.Name.EndsWith(".Live")) + { + File.Copy(file, Path.Combine(tempFolder, fileInfo.Name)); + } } } }).Write("seq", invalidStore); diff --git a/Sources/Runtime/Test.Psi/SerializationTester.cs b/Sources/Runtime/Test.Psi/SerializationTester.cs index 681a3bdc0..62a57a52d 100644 --- a/Sources/Runtime/Test.Psi/SerializationTester.cs +++ b/Sources/Runtime/Test.Psi/SerializationTester.cs @@ -977,6 +977,8 @@ private class PocoSerializer : ISerializer { public int Version => throw new NotImplementedException(); + public bool? IsClearRequired => false; + public TypeSchema Initialize(KnownSerializers serializers, TypeSchema targetSchema) { return null; diff --git a/Sources/Runtime/Test.Psi/Test.Psi.csproj b/Sources/Runtime/Test.Psi/Test.Psi.csproj index c802d7da6..c67b597d8 100644 --- a/Sources/Runtime/Test.Psi/Test.Psi.csproj +++ b/Sources/Runtime/Test.Psi/Test.Psi.csproj @@ -42,7 +42,7 @@ all runtime; build; native; contentfiles; analyzers - + diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Adapters.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Adapters.cs new file mode 100644 index 000000000..919a8dd7d --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Adapters.cs @@ -0,0 +1,169 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
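The adapters in this new file all follow the visualization system's StreamAdapter&lt;TSource, TDestination&gt; pattern: GetAdaptedValue converts each incoming message, and Dispose releases anything the adapter allocated for the adapted value. A minimal sketch of that pattern, using an illustrative int-to-string type pair that is not part of this patch:

    using Microsoft.Psi;
    using Microsoft.Psi.Visualization.Adapters;
    using Microsoft.Psi.Visualization.Data;

    // Illustrative adapter: converts each int message to its string representation.
    [StreamAdapter]
    public class IntToStringAdapter : StreamAdapter<int, string>
    {
        // Nothing is allocated per value, so the base Dispose behavior suffices.
        public override string GetAdaptedValue(int source, Envelope envelope) => source.ToString();
    }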
+ +#pragma warning disable SA1649 // File name should match first type name +#pragma warning disable SA1402 // File may only contain a single type + +namespace Microsoft.Psi.Spatial.Euclidean.Visualization +{ + using System.Collections.Generic; + using System.Linq; + using Microsoft.Psi.Spatial.Euclidean; + using Microsoft.Psi.Visualization.Adapters; + using Microsoft.Psi.Visualization.Data; + + /// + /// Adapter for encoded image rectangles. + /// + [StreamAdapter] + public class EncodedImageRectangle3DAdapter : StreamAdapter + { + private readonly EncodedImageToImageAdapter imageAdapter = new (); + + /// + public override ImageRectangle3D GetAdaptedValue(EncodedImageRectangle3D source, Envelope envelope) + { + if (source != null) + { + var encodedImage = this.imageAdapter.GetAdaptedValue(source.Image, envelope); + if (encodedImage != null) + { + return new ImageRectangle3D(source.Rectangle3D, encodedImage); + } + } + + return null; + } + + /// + public override void Dispose(ImageRectangle3D destination) + { + if (destination != null) + { + this.imageAdapter.Dispose(destination.Image); + } + } + } + + /// + /// Adapter for encoded depth image rectangles. + /// + [StreamAdapter] + public class EncodedDepthImageRectangle3DAdapter : StreamAdapter + { + private readonly EncodedDepthImageToDepthImageAdapter imageAdapter = new (); + + /// + public override DepthImageRectangle3D GetAdaptedValue(EncodedDepthImageRectangle3D source, Envelope envelope) + { + if (source != null) + { + var encodedDepthImage = this.imageAdapter.GetAdaptedValue(source.DepthImage, envelope); + if (encodedDepthImage != null) + { + return new DepthImageRectangle3D(source.Rectangle3D, encodedDepthImage); + } + } + + return null; + } + + /// + public override void Dispose(DepthImageRectangle3D destination) + { + if (destination != null) + { + this.imageAdapter.Dispose(destination.DepthImage); + } + } + } + + /// + /// Adapter for list of nullable to list of nullable . + /// + [StreamAdapter] + public class NullableEncodedDepthImageRectangle3DListAdapter : StreamAdapter, List> + { + private readonly EncodedDepthImageToDepthImageAdapter imageAdapter = new (); + + /// + public override List GetAdaptedValue(List source, Envelope envelope) + { + if (source != null) + { + List outputList = new (); + foreach (var inputRectangle in source) + { + if (inputRectangle != null) + { + var encodedDepthImage = this.imageAdapter.GetAdaptedValue(inputRectangle.DepthImage, envelope); + if (encodedDepthImage != null) + { + outputList.Add(new DepthImageRectangle3D(inputRectangle.Rectangle3D, encodedDepthImage)); + } + } + } + + return outputList; + } + + return null; + } + + /// + public override void Dispose(List destination) + { + foreach (var imgRect in destination) + { + if (imgRect != null) + { + this.imageAdapter.Dispose(imgRect.DepthImage); + } + } + } + + /// + /// Adapter for to nullable . + /// + [StreamAdapter] + public class Rectangle3DToNullableAdapter : StreamAdapter + { + /// + public override Rectangle3D? GetAdaptedValue(Rectangle3D source, Envelope envelope) + => source; + } + + /// + /// Adapter for list of to list of nullable . + /// + [StreamAdapter] + public class Rectangle3DListToNullableAdapter : StreamAdapter, List> + { + /// + public override List GetAdaptedValue(List source, Envelope envelope) + => source?.Select(p => p as Rectangle3D?).ToList(); + } + + /// + /// Adapter for to nullable . + /// + [StreamAdapter] + public class Box3DToNullableAdapter : StreamAdapter + { + /// + public override Box3D? 
GetAdaptedValue(Box3D source, Envelope envelope) + => source; + } + + /// + /// Adapter for list of to list of nullable . + /// + [StreamAdapter] + public class Box3DListToNullableAdapter : StreamAdapter, List> + { + /// + public override List GetAdaptedValue(List source, Envelope envelope) + => source?.Select(p => p as Box3D?).ToList(); + } + } +} \ No newline at end of file diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Box3DListVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Box3DListVisualizationObject.cs new file mode 100644 index 000000000..8e0172269 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Box3DListVisualizationObject.cs @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean.Visualization +{ + using Microsoft.Psi.Spatial.Euclidean; + using Microsoft.Psi.Visualization.VisualizationObjects; + + /// + /// Implements a visualization object that can display lists of 3D boxes. + /// + [VisualizationObject("3D boxes")] + public class Box3DListVisualizationObject : ModelVisual3DListVisualizationObject + { + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Box3DVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Box3DVisualizationObject.cs new file mode 100644 index 000000000..3e06c3168 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Box3DVisualizationObject.cs @@ -0,0 +1,317 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean.Visualization +{ + using System; + using System.ComponentModel; + using System.Runtime.Serialization; + using System.Windows.Media; + using System.Windows.Media.Media3D; + using HelixToolkit.Wpf; + using Microsoft.Psi.Spatial.Euclidean; + using Microsoft.Psi.Visualization.Extensions; + using Microsoft.Psi.Visualization.VisualizationObjects; + + /// + /// Implements a 3D box visualization object. + /// + [VisualizationObject("3D box")] + public class Box3DVisualizationObject : ModelVisual3DVisualizationObject + { + // Edge properties + private Color edgeColor = Colors.White; + private double edgeThicknessMm = 3; + private double edgeOpacity = 100; + private bool edgeVisible = true; + private int pipeDiv = 7; + + // Facet properties + private Color facetColor = Colors.DodgerBlue; + private double facetOpacity = 10; + private bool facetVisible = true; + + // The edges and facets that make up the 3D rectangle + private PipeVisual3D[] edges; + private MeshGeometryVisual3D[] facets; + + /// + /// Initializes a new instance of the class. + /// + public Box3DVisualizationObject() + { + this.CreateBoxEdges(); + this.CreateBoxFacets(); + this.UpdateFacetContents(); + + this.UpdateVisibility(); + } + + /// + /// Gets or sets the color. + /// + [DataMember] + [DisplayName("Edge Color")] + [Description("The color of the box edges.")] + public Color EdgeColor + { + get { return this.edgeColor; } + set { this.Set(nameof(this.EdgeColor), ref this.edgeColor, value); } + } + + /// + /// Gets or sets the edge thickness in millimeters. 
+ /// + [DataMember] + [DisplayName("Edge Thickness (mm)")] + [Description("The thickness of the box edges in millimeters.")] + public double EdgeThicknessMm + { + get { return this.edgeThicknessMm; } + set { this.Set(nameof(this.EdgeThicknessMm), ref this.edgeThicknessMm, value); } + } + + /// + /// Gets or sets the edge opacity. + /// + [DataMember] + [DisplayName("Edge Opacity")] + [Description("The opacity of the box edges.")] + public double EdgeOpacity + { + get { return this.edgeOpacity; } + set { this.Set(nameof(this.EdgeOpacity), ref this.edgeOpacity, value); } + } + + /// + /// Gets or sets a value indicating whether the edges are displayed. + /// + [DataMember] + [DisplayName("Edge Visibility")] + [Description("Indicates whether the edges are displayed or not.")] + public bool EdgeVisible + { + get { return this.edgeVisible; } + set { this.Set(nameof(this.EdgeVisible), ref this.edgeVisible, value); } + } + + /// + /// Gets or sets the number of divisions to use when rendering each edge as a pipe. + /// + [DataMember] + [DisplayName("Pipe Divisions")] + [Description("Number of divisions to use when rendering each rectangle edge as a pipe (minimum value is 3).")] + public int PipeDivisions + { + get { return this.pipeDiv; } + set { this.Set(nameof(this.PipeDivisions), ref this.pipeDiv, value < 3 ? 3 : value); } + } + + /// + /// Gets or sets the facet color. + /// + [DataMember] + [DisplayName("Facet Color")] + [Description("The color of the box facets.")] + public Color FacetColor + { + get { return this.facetColor; } + set { this.Set(nameof(this.FacetColor), ref this.facetColor, value); } + } + + /// + /// Gets or sets the facet opacity. + /// + [DataMember] + [DisplayName("Facet Opacity")] + [Description("The opacity of the box facets.")] + public double FacetOpacity + { + get { return this.facetOpacity; } + set { this.Set(nameof(this.FacetOpacity), ref this.facetOpacity, value); } + } + + /// + /// Gets or sets a value indicating whether the edges are displayed. + /// + [DataMember] + [DisplayName("Facet Visibility")] + [Description("Indicates whether the box facets are displayed or not.")] + public bool FacetVisible + { + get { return this.facetVisible; } + set { this.Set(nameof(this.FacetVisible), ref this.facetVisible, value); } + } + + /// + public override void UpdateData() + { + if (this.CurrentData.HasValue) + { + // Update the edge locations. 
+ var box = this.CurrentData.Value; + var p0 = (box.Origin + box.XAxis.ScaleBy(box.Bounds.Min.X) + box.YAxis.ScaleBy(box.Bounds.Min.Y) + box.ZAxis.ScaleBy(box.Bounds.Min.Z)).ToPoint3D(); + var p1 = (box.Origin + box.XAxis.ScaleBy(box.Bounds.Max.X) + box.YAxis.ScaleBy(box.Bounds.Min.Y) + box.ZAxis.ScaleBy(box.Bounds.Min.Z)).ToPoint3D(); + var p2 = (box.Origin + box.XAxis.ScaleBy(box.Bounds.Max.X) + box.YAxis.ScaleBy(box.Bounds.Max.Y) + box.ZAxis.ScaleBy(box.Bounds.Min.Z)).ToPoint3D(); + var p3 = (box.Origin + box.XAxis.ScaleBy(box.Bounds.Min.X) + box.YAxis.ScaleBy(box.Bounds.Max.Y) + box.ZAxis.ScaleBy(box.Bounds.Min.Z)).ToPoint3D(); + var p4 = (box.Origin + box.XAxis.ScaleBy(box.Bounds.Min.X) + box.YAxis.ScaleBy(box.Bounds.Min.Y) + box.ZAxis.ScaleBy(box.Bounds.Max.Z)).ToPoint3D(); + var p5 = (box.Origin + box.XAxis.ScaleBy(box.Bounds.Max.X) + box.YAxis.ScaleBy(box.Bounds.Min.Y) + box.ZAxis.ScaleBy(box.Bounds.Max.Z)).ToPoint3D(); + var p6 = (box.Origin + box.XAxis.ScaleBy(box.Bounds.Max.X) + box.YAxis.ScaleBy(box.Bounds.Max.Y) + box.ZAxis.ScaleBy(box.Bounds.Max.Z)).ToPoint3D(); + var p7 = (box.Origin + box.XAxis.ScaleBy(box.Bounds.Min.X) + box.YAxis.ScaleBy(box.Bounds.Max.Y) + box.ZAxis.ScaleBy(box.Bounds.Max.Z)).ToPoint3D(); + + this.UpdateLinePosition(this.edges[0], p0, p1); + this.UpdateLinePosition(this.edges[1], p1, p2); + this.UpdateLinePosition(this.edges[2], p2, p3); + this.UpdateLinePosition(this.edges[3], p3, p0); + this.UpdateLinePosition(this.edges[4], p4, p5); + this.UpdateLinePosition(this.edges[5], p5, p6); + this.UpdateLinePosition(this.edges[6], p6, p7); + this.UpdateLinePosition(this.edges[7], p7, p4); + this.UpdateLinePosition(this.edges[8], p0, p4); + this.UpdateLinePosition(this.edges[9], p1, p5); + this.UpdateLinePosition(this.edges[10], p2, p6); + this.UpdateLinePosition(this.edges[11], p3, p7); + + // Update the facets. + for (int i = 0; i < this.facets.Length; i++) + { + var rectangle = this.CurrentData.Value.GetFacet((Box3DFacet)Enum.GetValues(typeof(Box3DFacet)).GetValue(i)); + this.facets[i].MeshGeometry.Positions[0] = rectangle.TopLeft.ToPoint3D(); + this.facets[i].MeshGeometry.Positions[1] = rectangle.TopRight.ToPoint3D(); + this.facets[i].MeshGeometry.Positions[2] = rectangle.BottomRight.ToPoint3D(); + this.facets[i].MeshGeometry.Positions[3] = rectangle.BottomLeft.ToPoint3D(); + } + } + + this.UpdateVisibility(); + } + + /// + public override void NotifyPropertyChanged(string propertyName) + { + // Check if the changed property is one that require updating the lines in the image. 
+ if (propertyName == nameof(this.EdgeColor) || + propertyName == nameof(this.EdgeOpacity) || + propertyName == nameof(this.EdgeThicknessMm) || + propertyName == nameof(this.PipeDivisions)) + { + this.UpdateLineProperties(); + } + else if (propertyName == nameof(this.FacetColor) || + propertyName == nameof(this.FacetOpacity)) + { + this.UpdateFacetContents(); + } + else if (propertyName == nameof(this.Visible) || + propertyName == nameof(this.EdgeVisible) || + propertyName == nameof(this.FacetVisible)) + { + this.UpdateVisibility(); + } + } + + private void CreateBoxEdges() + { + // Create the edges + this.edges = new PipeVisual3D[12]; + for (int i = 0; i < this.edges.Length; i++) + { + this.edges[i] = new PipeVisual3D(); + } + + // Set the color, thickness, opacity + this.UpdateLineProperties(); + } + + private void CreateBoxFacets() + { + this.facets = new MeshGeometryVisual3D[6]; + for (int i = 0; i < this.facets.Length; i++) + { + this.facets[i] = new MeshGeometryVisual3D + { + MeshGeometry = new MeshGeometry3D(), + }; + + this.facets[i].MeshGeometry.Positions.Add(default); + this.facets[i].MeshGeometry.Positions.Add(default); + this.facets[i].MeshGeometry.Positions.Add(default); + this.facets[i].MeshGeometry.Positions.Add(default); + this.facets[i].MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(0, 0)); + this.facets[i].MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(1, 0)); + this.facets[i].MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(1, 1)); + this.facets[i].MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(0, 1)); + + this.facets[i].MeshGeometry.TriangleIndices.Add(0); + this.facets[i].MeshGeometry.TriangleIndices.Add(1); + this.facets[i].MeshGeometry.TriangleIndices.Add(2); + + this.facets[i].MeshGeometry.TriangleIndices.Add(0); + this.facets[i].MeshGeometry.TriangleIndices.Add(2); + this.facets[i].MeshGeometry.TriangleIndices.Add(3); + + this.facets[i].MeshGeometry.TriangleIndices.Add(0); + this.facets[i].MeshGeometry.TriangleIndices.Add(3); + this.facets[i].MeshGeometry.TriangleIndices.Add(2); + + this.facets[i].MeshGeometry.TriangleIndices.Add(0); + this.facets[i].MeshGeometry.TriangleIndices.Add(2); + this.facets[i].MeshGeometry.TriangleIndices.Add(1); + } + } + + private void UpdateFacetContents() + { + double opacity = Math.Max(0, Math.Min(100, this.facetOpacity)); + var alphaColor = Color.FromArgb( + (byte)(opacity * 2.55), + this.facetColor.R, + this.facetColor.G, + this.facetColor.B); + var material = new DiffuseMaterial(new SolidColorBrush(alphaColor)); + + for (int i = 0; i < this.facets.Length; i++) + { + this.facets[i].Material = material; + this.facets[i].BackMaterial = material; + } + } + + private void UpdateLinePosition(Visual3D visual, Point3D point1, Point3D point2) + { + PipeVisual3D line = visual as PipeVisual3D; + line.Point1 = point1; + line.Point2 = point2; + } + + private void UpdateVisibility() + { + foreach (var line in this.edges) + { + this.UpdateChildVisibility(line, this.Visible && this.EdgeVisible && this.CurrentData.HasValue); + } + + foreach (var facet in this.facets) + { + this.UpdateChildVisibility(facet, this.Visible && this.FacetVisible && this.CurrentData.HasValue); + } + } + + private void UpdateLineProperties() + { + double opacity = Math.Max(0, Math.Min(100, this.EdgeOpacity)); + var alphaColor = Color.FromArgb( + (byte)(opacity * 2.55), + this.EdgeColor.R, + this.EdgeColor.G, + this.EdgeColor.B); + + foreach (PipeVisual3D line in this.edges) + { + line.Diameter = this.EdgeThicknessMm / 
1000.0; + line.Fill = new SolidColorBrush(alphaColor); + line.ThetaDiv = this.PipeDivisions; + } + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/DepthImageRectangle3DListVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/DepthImageRectangle3DListVisualizationObject.cs new file mode 100644 index 000000000..4ac64a7a3 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/DepthImageRectangle3DListVisualizationObject.cs @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean.Visualization +{ + using Microsoft.Psi.Spatial.Euclidean; + using Microsoft.Psi.Visualization.VisualizationObjects; + + /// + /// Implements a 3D depth image rectangle list visualization object. + /// + [VisualizationObject("3D Depth Image Rectangles")] + public class DepthImageRectangle3DListVisualizationObject : ModelVisual3DListVisualizationObject + { + } +} \ No newline at end of file diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/DepthImageRectangle3DVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/DepthImageRectangle3DVisualizationObject.cs new file mode 100644 index 000000000..44133accc --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/DepthImageRectangle3DVisualizationObject.cs @@ -0,0 +1,398 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean.Visualization +{ + using System; + using System.ComponentModel; + using System.Runtime.Serialization; + using System.Windows.Media; + using System.Windows.Media.Media3D; + using HelixToolkit.Wpf; + using Microsoft.Psi.Imaging; + using Microsoft.Psi.Spatial.Euclidean; + using Microsoft.Psi.Visualization; + using Microsoft.Psi.Visualization.Extensions; + using Microsoft.Psi.Visualization.VisualizationObjects; + + /// + /// Implements a 3D depth image rectangle visualization object. + /// + [VisualizationObject("3D Depth Image Rectangle")] + public class DepthImageRectangle3DVisualizationObject : ModelVisual3DVisualizationObject + { + private readonly MeshGeometryVisual3D depthImageModelVisual; + private readonly DisplayImage displayImage; + private readonly PipeVisual3D[] borderEdges; + + // Fill properties + private double imageOpacity = 100; + private Shared depthImage = null; + + // Border properties + private Color borderColor = Colors.White; + private double borderThicknessMm = 15; + private double borderOpacity = 100; + private int pipeDiv = 7; + + /// + /// The depth image range. + /// + private DepthImageRangeMode rangeMode = DepthImageRangeMode.Maximum; + + /// + /// Indicates the value in the depth image that is considered invalid and pseudo-colorized as transparent. + /// + private int invalidValue = -1; + + /// + /// Indicates whether to render invalid depths as transparent. + /// + private bool invalidAsTransparent = false; + + /// + /// Indicates the minimum of the depth values range in the image. + /// + private int rangeMin = 0; + + /// + /// Indicates the maximum of the depth values range in the image. + /// + private int rangeMax = 65535; + + /// + /// Initializes a new instance of the class. 
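Throughout these visualization objects the display properties rely on the same two conversions: an opacity given as a 0-100 percentage is clamped and scaled by 2.55 into the byte alpha channel of the brush color, and a thickness given in millimeters is divided by 1000 to obtain a diameter in scene units (meters, going by the property names). A minimal sketch of those conversions factored into helpers; the class and method names are illustrative only, not part of the patch:

using System;
using System.Windows.Media;

internal static class VisualizationConversions
{
    // Maps an opacity percentage in [0, 100] to a copy of the color whose alpha
    // byte spans [0, 255], mirroring the Color.FromArgb calls above.
    public static Color WithOpacityPercent(Color color, double opacityPercent)
    {
        var clamped = Math.Max(0, Math.Min(100, opacityPercent));
        return Color.FromArgb((byte)(clamped * 2.55), color.R, color.G, color.B);
    }

    // Converts a thickness expressed in millimeters to meters for use as a
    // PipeVisual3D diameter, mirroring the "/ 1000.0" divisions above.
    public static double MillimetersToMeters(double millimeters) => millimeters / 1000.0;
}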
+ /// + public DepthImageRectangle3DVisualizationObject() + { + // Create a rectangle mesh for the image + this.displayImage = new DisplayImage(); + this.depthImageModelVisual = new MeshGeometryVisual3D + { + MeshGeometry = new MeshGeometry3D(), + }; + + this.depthImageModelVisual.MeshGeometry.Positions.Add(default); + this.depthImageModelVisual.MeshGeometry.Positions.Add(default); + this.depthImageModelVisual.MeshGeometry.Positions.Add(default); + this.depthImageModelVisual.MeshGeometry.Positions.Add(default); + this.depthImageModelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(0, 0)); + this.depthImageModelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(1, 0)); + this.depthImageModelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(1, 1)); + this.depthImageModelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(0, 1)); + + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(0); + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(1); + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(2); + + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(0); + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(2); + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(3); + + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(0); + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(3); + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(2); + + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(0); + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(2); + this.depthImageModelVisual.MeshGeometry.TriangleIndices.Add(1); + + // Create the border lines + this.borderEdges = new PipeVisual3D[4]; + for (int i = 0; i < this.borderEdges.Length; i++) + { + this.borderEdges[i] = new PipeVisual3D(); + } + + // Set the color, thickness, opacity + this.UpdateLineProperties(); + + this.UpdateVisibility(); + } + + /// + /// Gets or sets the image opacity. + /// + [DataMember] + [DisplayName("Image Opacity")] + [Description("The image opacity inside the rectangle.")] + public double ImageOpacity + { + get { return this.imageOpacity; } + set { this.Set(nameof(this.ImageOpacity), ref this.imageOpacity, value); } + } + + /// + /// Gets or sets the border color. + /// + [DataMember] + [DisplayName("Border Color")] + [Description("The color of the rectangle border.")] + public Color BorderColor + { + get { return this.borderColor; } + set { this.Set(nameof(this.BorderColor), ref this.borderColor, value); } + } + + /// + /// Gets or sets the border thickness. + /// + [DataMember] + [DisplayName("Border Thickness (mm)")] + [Description("The thickness of the rectangle border in millimeters.")] + public double BorderThicknessMm + { + get { return this.borderThicknessMm; } + set { this.Set(nameof(this.BorderThicknessMm), ref this.borderThicknessMm, value); } + } + + /// + /// Gets or sets the border opacity. + /// + [DataMember] + [DisplayName("Border Opacity")] + [Description("The opacity of the rectangle border.")] + public double BorderOpacity + { + get { return this.borderOpacity; } + set { this.Set(nameof(this.BorderOpacity), ref this.borderOpacity, value); } + } + + /// + /// Gets or sets the number of divisions to use when rendering each edge as a pipe. 
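The constructor above builds the textured rectangle as a four-vertex mesh: the corners carry unit-square texture coordinates, triangles (0,1,2) and (0,2,3) form one face, and the reversed windings (0,3,2) and (0,2,1) duplicate them facing the other way so the image stays visible from behind (the visualizers also set BackMaterial). The same quad construction recurs in the other rectangle visualizers below. A stand-alone sketch of that construction; the helper name is illustrative, not part of the patch:

using System.Windows;
using System.Windows.Media.Media3D;

internal static class QuadMeshFactory
{
    // Builds a double-sided quad: four placeholder positions (filled in later by
    // UpdateData), unit-square texture coordinates, and both windings of the two
    // triangles that cover the rectangle.
    public static MeshGeometry3D CreateDoubleSidedQuad()
    {
        var mesh = new MeshGeometry3D();

        for (int i = 0; i < 4; i++)
        {
            mesh.Positions.Add(default);
        }

        mesh.TextureCoordinates.Add(new Point(0, 0));
        mesh.TextureCoordinates.Add(new Point(1, 0));
        mesh.TextureCoordinates.Add(new Point(1, 1));
        mesh.TextureCoordinates.Add(new Point(0, 1));

        foreach (var index in new[] { 0, 1, 2, 0, 2, 3, 0, 3, 2, 0, 2, 1 })
        {
            mesh.TriangleIndices.Add(index);
        }

        return mesh;
    }
}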
+ /// + [DataMember] + [DisplayName("Pipe Divisions")] + [Description("Number of divisions to use when rendering each rectangle edge as a pipe (minimum value is 3).")] + public int PipeDivisions + { + get { return this.pipeDiv; } + set { this.Set(nameof(this.PipeDivisions), ref this.pipeDiv, value < 3 ? 3 : value); } + } + + /// + /// Gets or sets a value indicating an invalid depth. + /// + [DataMember] + [DisplayName("Invalid Value")] + [Description("Specifies the pixel value that denotes an invalid depth.")] + public int InvalidValue + { + get { return this.invalidValue; } + set { this.Set(nameof(this.InvalidValue), ref this.invalidValue, value); } + } + + /// + /// Gets or sets a value indicating whether to render invalid depths as transparent. + /// + [DataMember] + [DisplayName("Invalid Value as Transparent")] + [Description("Indicates whether to render invalid depths as transparent.")] + public bool InvalidAsTransparent + { + get { return this.invalidAsTransparent; } + set { this.Set(nameof(this.InvalidAsTransparent), ref this.invalidAsTransparent, value); } + } + + /// + /// Gets or sets the range of values to use. + /// + [DataMember] + [DisplayName("Range Mode")] + [Description("Specifies the range of depth values in the image.")] + public DepthImageRangeMode RangeMode + { + get => this.rangeMode; + set + { + this.Set(nameof(this.RangeMode), ref this.rangeMode, value); + if (this.rangeMode != DepthImageRangeMode.Auto && this.rangeMode != DepthImageRangeMode.Custom) + { + (var min, var max, var invalid) = DepthImageVisualizationObject.GetRange(this.rangeMode); + this.SetRange(min, max); + this.InvalidValue = invalid; + } + } + } + + /// + /// Gets or sets a value indicating the minimum of the depth values range in the image. + /// + [DataMember] + [DisplayName("Range Min")] + [Description("Specifies the minimum depth value for pseudo-colorizing the image.")] + public int RangeMin + { + get => this.rangeMin; + set + { + if (value != this.rangeMin) + { + this.RangeMode = DepthImageRangeMode.Custom; + this.Set(nameof(this.RangeMin), ref this.rangeMin, value); + } + } + } + + /// + /// Gets or sets a value indicating the maximum of the depth values range in the image. 
+ /// + [DataMember] + [DisplayName("Range Max")] + [Description("Specifies the maximum depth value for pseudo-colorizing the image.")] + public int RangeMax + { + get => this.rangeMax; + set + { + if (value != this.rangeMax) + { + this.RangeMode = DepthImageRangeMode.Custom; + this.Set(nameof(this.RangeMax), ref this.rangeMax, value); + } + } + } + + /// + protected override Action Deallocator => data => data?.Dispose(); + + /// + public override void UpdateData() + { + if (this.CurrentData != null && !this.CurrentData.Rectangle3D.IsDegenerate && + this.CurrentData.DepthImage != null && this.CurrentData.DepthImage.Resource != null) + { + if (this.depthImage != null) + { + this.depthImage.Dispose(); + } + + this.depthImage = this.CurrentData.DepthImage.AddRef(); + + var rectangle = this.CurrentData.Rectangle3D; + var topLeftPoint3D = rectangle.TopLeft.ToPoint3D(); + var topRightPoint3D = rectangle.TopRight.ToPoint3D(); + var bottomRightPoint3D = rectangle.BottomRight.ToPoint3D(); + var bottomLeftPoint3D = rectangle.BottomLeft.ToPoint3D(); + + this.depthImageModelVisual.MeshGeometry.Positions[0] = topLeftPoint3D; + this.depthImageModelVisual.MeshGeometry.Positions[1] = topRightPoint3D; + this.depthImageModelVisual.MeshGeometry.Positions[2] = bottomRightPoint3D; + this.depthImageModelVisual.MeshGeometry.Positions[3] = bottomLeftPoint3D; + + this.UpdateLinePosition(this.borderEdges[0], topLeftPoint3D, topRightPoint3D); + this.UpdateLinePosition(this.borderEdges[1], topRightPoint3D, bottomRightPoint3D); + this.UpdateLinePosition(this.borderEdges[2], bottomRightPoint3D, bottomLeftPoint3D); + this.UpdateLinePosition(this.borderEdges[3], bottomLeftPoint3D, topLeftPoint3D); + + this.UpdateRectangleContents(); + } + + this.UpdateVisibility(); + } + + /// + public override void NotifyPropertyChanged(string propertyName) + { + if (propertyName == nameof(this.ImageOpacity) || + propertyName == nameof(this.RangeMax) || + propertyName == nameof(this.RangeMin) || + propertyName == nameof(this.RangeMode) || + propertyName == nameof(this.InvalidValue) || + propertyName == nameof(this.InvalidAsTransparent)) + { + this.UpdateRectangleContents(); + } + else if (propertyName == nameof(this.BorderColor) || + propertyName == nameof(this.BorderOpacity) || + propertyName == nameof(this.BorderThicknessMm) || + propertyName == nameof(this.PipeDivisions)) + { + this.UpdateLineProperties(); + } + else if (propertyName == nameof(this.Visible)) + { + this.UpdateVisibility(); + } + } + + private void UpdateRectangleContents() + { + if (this.depthImage != null && this.depthImage.Resource != null) + { + // Update the display image + using var sharedColorizedImage = ImagePool.GetOrCreate( + this.depthImage.Resource.Width, + this.depthImage.Resource.Height, + Imaging.PixelFormat.BGRA_32bpp); + + if (this.RangeMode == DepthImageRangeMode.Auto) + { + (var minRange, var maxRange) = this.depthImage.Resource.GetPixelRange(); + this.SetRange(minRange, maxRange); + } + + this.depthImage.Resource.PseudoColorize( + sharedColorizedImage.Resource, + ((ushort)this.RangeMin, (ushort)this.RangeMax), + (this.InvalidValue < 0) ? 
null : (ushort)this.InvalidValue, + this.InvalidAsTransparent); + + this.displayImage.UpdateImage(sharedColorizedImage); + + // Render the display image + var material = new DiffuseMaterial(new ImageBrush(this.displayImage.Image) { Opacity = this.imageOpacity * 0.01 }); + this.depthImageModelVisual.Material = material; + this.depthImageModelVisual.BackMaterial = material; + } + } + + private void UpdateLinePosition(Visual3D visual, Point3D point1, Point3D point2) + { + PipeVisual3D line = visual as PipeVisual3D; + line.Point1 = point1; + line.Point2 = point2; + } + + private void UpdateLineProperties() + { + double opacity = Math.Max(0, Math.Min(100, this.borderOpacity)); + var alphaColor = Color.FromArgb( + (byte)(opacity * 2.55), + this.borderColor.R, + this.borderColor.G, + this.borderColor.B); + + foreach (PipeVisual3D line in this.borderEdges) + { + line.Diameter = this.borderThicknessMm / 1000.0; + line.Fill = new SolidColorBrush(alphaColor); + line.ThetaDiv = this.pipeDiv; + } + } + + private void UpdateVisibility() + { + var visibleBorder = this.Visible && this.CurrentData != null && !this.CurrentData.Rectangle3D.IsDegenerate; + var visibleImage = visibleBorder && this.depthImage != null && this.depthImage.Resource != null; + + this.UpdateChildVisibility(this.depthImageModelVisual, visibleImage); + + foreach (PipeVisual3D line in this.borderEdges) + { + this.UpdateChildVisibility(line, visibleBorder); + } + } + + /// + /// Programmatically sets the range without altering the range compute mode. + /// + /// The new range minimum value. + /// The new range maximum value. + private void SetRange(int rangeMin, int rangeMax) + { + this.Set(nameof(this.RangeMin), ref this.rangeMin, rangeMin); + this.Set(nameof(this.RangeMax), ref this.rangeMax, rangeMax); + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/ImageRectangle3DVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/ImageRectangle3DVisualizationObject.cs new file mode 100644 index 000000000..f719035d5 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/ImageRectangle3DVisualizationObject.cs @@ -0,0 +1,257 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean.Visualization +{ + using System; + using System.ComponentModel; + using System.Runtime.Serialization; + using System.Windows.Media; + using System.Windows.Media.Media3D; + using HelixToolkit.Wpf; + using Microsoft.Psi.Imaging; + using Microsoft.Psi.Spatial.Euclidean; + using Microsoft.Psi.Visualization; + using Microsoft.Psi.Visualization.Extensions; + using Microsoft.Psi.Visualization.VisualizationObjects; + + /// + /// Implements a 3D image rectangle visualization object. + /// + [VisualizationObject("3D Image Rectangle")] + public class ImageRectangle3DVisualizationObject : ModelVisual3DVisualizationObject + { + private readonly MeshGeometryVisual3D modelVisual; + private readonly DisplayImage displayImage; + private readonly PipeVisual3D[] borderEdges; + + // Fill properties + private double imageOpacity = 100; + private Shared image = null; + + // Border properties + private Color borderColor = Colors.White; + private double borderThicknessMm = 15; + private double borderOpacity = 100; + private int pipeDiv = 7; + + /// + /// Initializes a new instance of the class. 
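UpdateRectangleContents above is where the depth data becomes visible: the depth image is pseudo-colorized into a BGRA image using the configured range (or the image's own pixel range when the mode is Auto), the invalid value is optionally rendered as transparent, and the result feeds an ImageBrush whose Opacity comes from ImageOpacity. A minimal stand-alone sketch of that colorization step, using only the calls that appear above; the helper name and its ownership convention are assumptions, not part of the patch:

using Microsoft.Psi;
using Microsoft.Psi.Imaging;

internal static class DepthColorization
{
    // Pseudo-colorizes a shared depth image into a BGRA color image over the
    // image's own pixel range; the caller is responsible for disposing the result.
    public static Shared<Image> Colorize(Shared<DepthImage> depth)
    {
        var colorized = ImagePool.GetOrCreate(
            depth.Resource.Width,
            depth.Resource.Height,
            PixelFormat.BGRA_32bpp);

        (var min, var max) = depth.Resource.GetPixelRange();

        depth.Resource.PseudoColorize(
            colorized.Resource,
            ((ushort)min, (ushort)max),
            null,   // no invalid value to mask out
            false); // and hence nothing to render as transparent

        return colorized;
    }
}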
+ /// + public ImageRectangle3DVisualizationObject() + { + // Create the fill mesh + this.displayImage = new DisplayImage(); + this.modelVisual = new MeshGeometryVisual3D + { + MeshGeometry = new MeshGeometry3D(), + }; + + this.modelVisual.MeshGeometry.Positions.Add(default); + this.modelVisual.MeshGeometry.Positions.Add(default); + this.modelVisual.MeshGeometry.Positions.Add(default); + this.modelVisual.MeshGeometry.Positions.Add(default); + this.modelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(0, 0)); + this.modelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(1, 0)); + this.modelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(1, 1)); + this.modelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(0, 1)); + + this.modelVisual.MeshGeometry.TriangleIndices.Add(0); + this.modelVisual.MeshGeometry.TriangleIndices.Add(1); + this.modelVisual.MeshGeometry.TriangleIndices.Add(2); + + this.modelVisual.MeshGeometry.TriangleIndices.Add(0); + this.modelVisual.MeshGeometry.TriangleIndices.Add(2); + this.modelVisual.MeshGeometry.TriangleIndices.Add(3); + + this.modelVisual.MeshGeometry.TriangleIndices.Add(0); + this.modelVisual.MeshGeometry.TriangleIndices.Add(3); + this.modelVisual.MeshGeometry.TriangleIndices.Add(2); + + this.modelVisual.MeshGeometry.TriangleIndices.Add(0); + this.modelVisual.MeshGeometry.TriangleIndices.Add(2); + this.modelVisual.MeshGeometry.TriangleIndices.Add(1); + + // Create the border lines + this.borderEdges = new PipeVisual3D[4]; + for (int i = 0; i < this.borderEdges.Length; i++) + { + this.borderEdges[i] = new PipeVisual3D(); + } + + // Set the color, thickness, opacity + this.UpdateLineProperties(); + + this.UpdateVisibility(); + } + + /// + /// Gets or sets the image opacity. + /// + [DataMember] + [DisplayName("Image Opacity")] + [Description("The image opacity inside the rectangle.")] + public double ImageOpacity + { + get { return this.imageOpacity; } + set { this.Set(nameof(this.ImageOpacity), ref this.imageOpacity, value); } + } + + /// + /// Gets or sets the border color. + /// + [DataMember] + [DisplayName("Border Color")] + [Description("The color of the rectangle border.")] + public Color BorderColor + { + get { return this.borderColor; } + set { this.Set(nameof(this.BorderColor), ref this.borderColor, value); } + } + + /// + /// Gets or sets the border thickness. + /// + [DataMember] + [DisplayName("Border Thickness (mm)")] + [Description("The thickness of the rectangle border in millimeters.")] + public double BorderThicknessMm + { + get { return this.borderThicknessMm; } + set { this.Set(nameof(this.BorderThicknessMm), ref this.borderThicknessMm, value); } + } + + /// + /// Gets or sets the border opacity. + /// + [DataMember] + [DisplayName("Border Opacity")] + [Description("The opacity of the rectangle border.")] + public double BorderOpacity + { + get { return this.borderOpacity; } + set { this.Set(nameof(this.BorderOpacity), ref this.borderOpacity, value); } + } + + /// + /// Gets or sets the number of divisions to use when rendering each edge as a pipe. + /// + [DataMember] + [DisplayName("Pipe Divisions")] + [Description("Number of divisions to use when rendering each rectangle edge as a pipe (minimum value is 3).")] + public int PipeDivisions + { + get { return this.pipeDiv; } + set { this.Set(nameof(this.PipeDivisions), ref this.pipeDiv, value < 3 ? 
3 : value); } + } + + /// + protected override Action Deallocator => data => data?.Dispose(); + + /// + public override void UpdateData() + { + if (this.CurrentData != null && !this.CurrentData.Rectangle3D.IsDegenerate && + this.CurrentData.Image != null && this.CurrentData.Image.Resource != null) + { + if (this.image != null) + { + this.image.Dispose(); + } + + this.image = this.CurrentData.Image.AddRef(); + + var rectangle = this.CurrentData.Rectangle3D; + var topLeftPoint3D = rectangle.TopLeft.ToPoint3D(); + var topRightPoint3D = rectangle.TopRight.ToPoint3D(); + var bottomRightPoint3D = rectangle.BottomRight.ToPoint3D(); + var bottomLeftPoint3D = rectangle.BottomLeft.ToPoint3D(); + + this.modelVisual.MeshGeometry.Positions[0] = topLeftPoint3D; + this.modelVisual.MeshGeometry.Positions[1] = topRightPoint3D; + this.modelVisual.MeshGeometry.Positions[2] = bottomRightPoint3D; + this.modelVisual.MeshGeometry.Positions[3] = bottomLeftPoint3D; + + this.UpdateLinePosition(this.borderEdges[0], topLeftPoint3D, topRightPoint3D); + this.UpdateLinePosition(this.borderEdges[1], topRightPoint3D, bottomRightPoint3D); + this.UpdateLinePosition(this.borderEdges[2], bottomRightPoint3D, bottomLeftPoint3D); + this.UpdateLinePosition(this.borderEdges[3], bottomLeftPoint3D, topLeftPoint3D); + + this.UpdateRectangleContents(); + } + + this.UpdateVisibility(); + } + + /// + public override void NotifyPropertyChanged(string propertyName) + { + if (propertyName == nameof(this.ImageOpacity)) + { + this.UpdateRectangleContents(); + } + else if (propertyName == nameof(this.BorderColor) || + propertyName == nameof(this.BorderOpacity) || + propertyName == nameof(this.BorderThicknessMm) || + propertyName == nameof(this.PipeDivisions)) + { + this.UpdateLineProperties(); + } + else if (propertyName == nameof(this.Visible)) + { + this.UpdateVisibility(); + } + } + + private void UpdateRectangleContents() + { + if (this.image != null && this.image.Resource != null) + { + // Update the display image + this.displayImage.UpdateImage(this.image); + + // Render the display image + var material = new DiffuseMaterial(new ImageBrush(this.displayImage.Image) { Opacity = this.imageOpacity * 0.01 }); + this.modelVisual.Material = material; + this.modelVisual.BackMaterial = material; + } + } + + private void UpdateLinePosition(Visual3D visual, Point3D point1, Point3D point2) + { + PipeVisual3D line = visual as PipeVisual3D; + line.Point1 = point1; + line.Point2 = point2; + } + + private void UpdateLineProperties() + { + double opacity = Math.Max(0, Math.Min(100, this.borderOpacity)); + var alphaColor = Color.FromArgb( + (byte)(opacity * 2.55), + this.borderColor.R, + this.borderColor.G, + this.borderColor.B); + + foreach (PipeVisual3D line in this.borderEdges) + { + line.Diameter = this.borderThicknessMm / 1000.0; + line.Fill = new SolidColorBrush(alphaColor); + line.ThetaDiv = this.pipeDiv; + } + } + + private void UpdateVisibility() + { + var visibleBorder = this.Visible && this.CurrentData != null && !this.CurrentData.Rectangle3D.IsDegenerate; + var visibleImage = visibleBorder && this.image != null && this.image.Resource != null; + + this.UpdateChildVisibility(this.modelVisual, visibleImage); + + foreach (PipeVisual3D line in this.borderEdges) + { + this.UpdateChildVisibility(line, visibleBorder); + } + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/LinearVelocity3DListVisualizationObject.cs 
b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/LinearVelocity3DListVisualizationObject.cs
new file mode 100644
index 000000000..f8e056293
--- /dev/null
+++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/LinearVelocity3DListVisualizationObject.cs
@@ -0,0 +1,16 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT license.
+
+namespace Microsoft.Psi.Spatial.Euclidean.Visualization
+{
+    using Microsoft.Psi.Spatial.Euclidean;
+    using Microsoft.Psi.Visualization.VisualizationObjects;
+
+    /// <summary>
+    /// Implements a visualization object that can display lists of linear velocity objects.
+    /// </summary>
+    [VisualizationObject("Linear 3D-velocities")]
+    public class LinearVelocity3DListVisualizationObject : ModelVisual3DListVisualizationObject<LinearVelocity3DVisualizationObject, LinearVelocity3D>
+    {
+    }
+}
\ No newline at end of file
diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/LinearVelocity3DVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/LinearVelocity3DVisualizationObject.cs
new file mode 100644
index 000000000..c62fc63a7
--- /dev/null
+++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/LinearVelocity3DVisualizationObject.cs
@@ -0,0 +1,105 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT license.
+
+namespace Microsoft.Psi.Spatial.Euclidean.Visualization
+{
+    using System.ComponentModel;
+    using System.Runtime.Serialization;
+    using System.Windows.Media;
+    using HelixToolkit.Wpf;
+    using Microsoft.Psi.Spatial.Euclidean;
+    using Microsoft.Psi.Visualization.VisualizationObjects;
+    using Win3D = System.Windows.Media.Media3D;
+
+    /// <summary>
+    /// Implements a linear velocity visualization object.
+    /// </summary>
+    [VisualizationObject("Linear 3D-velocity")]
+    public class LinearVelocity3DVisualizationObject : ModelVisual3DVisualizationObject<LinearVelocity3D>
+    {
+        private const int ThetaDivDefault = 5;
+
+        private readonly ArrowVisual3D velocityArrow;
+
+        private Color color = Colors.Orange;
+        private double thicknessMm = 15;
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="LinearVelocity3DVisualizationObject"/> class.
+        /// </summary>
+        public LinearVelocity3DVisualizationObject()
+        {
+            this.velocityArrow = new ArrowVisual3D() { ThetaDiv = ThetaDivDefault, Fill = new SolidColorBrush(this.Color), Diameter = this.ThicknessMm / 1000.0 };
+
+            this.UpdateVisibility();
+        }
+
+        /// <summary>
+        /// Gets or sets the color.
+        /// </summary>
+        [DataMember]
+        [Description("The color of the velocity vector.")]
+        public Color Color
+        {
+            get { return this.color; }
+            set { this.Set(nameof(this.Color), ref this.color, value); }
+        }
+
+        ///
+        /// Gets or sets the thickness.
+ /// + [DataMember] + [DisplayName("Thickness (mm)")] + [Description("The vector diameter in millimeters.")] + public double ThicknessMm + { + get { return this.thicknessMm; } + set { this.Set(nameof(this.ThicknessMm), ref this.thicknessMm, value); } + } + + /// + public override void UpdateData() + { + this.UpdateVelocityVector(); + this.UpdateVisibility(); + } + + /// + public override void NotifyPropertyChanged(string propertyName) + { + if (propertyName == nameof(this.Color)) + { + this.velocityArrow.Fill = new SolidColorBrush(this.Color); + } + else if (propertyName == nameof(this.ThicknessMm)) + { + this.velocityArrow.Diameter = this.ThicknessMm / 1000.0; + } + else if (propertyName == nameof(this.Visible)) + { + this.UpdateVisibility(); + } + } + + private void UpdateVelocityVector() + { + if (this.CurrentData != null) + { + if (this.CurrentData.Speed > 0) + { + var endPoint = this.CurrentData.Origin + this.CurrentData.Vector; + + this.velocityArrow.BeginEdit(); + this.velocityArrow.Point1 = new Win3D.Point3D(this.CurrentData.Origin.X, this.CurrentData.Origin.Y, this.CurrentData.Origin.Z); + this.velocityArrow.Point2 = new Win3D.Point3D(endPoint.X, endPoint.Y, endPoint.Z); + this.velocityArrow.EndEdit(); + } + } + } + + private void UpdateVisibility() + { + this.UpdateChildVisibility(this.velocityArrow, this.Visible && this.CurrentData != null && this.CurrentData.Speed > 0); + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Mesh3DListVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Mesh3DListVisualizationObject.cs new file mode 100644 index 000000000..9bc1bf1d6 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Mesh3DListVisualizationObject.cs @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean.Visualization +{ + using Microsoft.Psi.Spatial.Euclidean; + using Microsoft.Psi.Visualization.VisualizationObjects; + + /// + /// Implements a visualization object that can display lists of 3D meshes. + /// + [VisualizationObject("3D Meshes")] + public class Mesh3DListVisualizationObject : ModelVisual3DListVisualizationObject + { + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Mesh3DVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Mesh3DVisualizationObject.cs new file mode 100644 index 000000000..702455512 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Mesh3DVisualizationObject.cs @@ -0,0 +1,124 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean.Visualization +{ + using System; + using System.ComponentModel; + using System.Linq; + using System.Runtime.Serialization; + using System.Windows.Media; + using System.Windows.Media.Media3D; + using HelixToolkit.Wpf; + using Microsoft.Psi.Visualization.Extensions; + using Microsoft.Psi.Visualization.VisualizationObjects; + using Mesh3D = Microsoft.Psi.Spatial.Euclidean.Mesh3D; + + /// + /// Implements a 3D mesh visualization object. 
+ /// + [VisualizationObject("3D Mesh")] + public class Mesh3DVisualizationObject : ModelVisual3DVisualizationObject + { + private readonly MeshGeometryVisual3D modelVisual; + + // Fill properties + private Color fillColor = Colors.DodgerBlue; + private double fillOpacity = 100; + private bool fillVisible = true; + + /// + /// Initializes a new instance of the class. + /// + public Mesh3DVisualizationObject() + { + this.modelVisual = new MeshGeometryVisual3D + { + MeshGeometry = new MeshGeometry3D(), + }; + + this.UpdateVisibility(); + } + + /// + /// Gets or sets the fill color. + /// + [DataMember] + [DisplayName("Fill Color")] + [Description("The fill color of the mesh.")] + public Color FillColor + { + get { return this.fillColor; } + set { this.Set(nameof(this.FillColor), ref this.fillColor, value); } + } + + /// + /// Gets or sets the fill opacity. + /// + [DataMember] + [DisplayName("Fill Opacity")] + [Description("The fill opacity of the mesh.")] + public double FillOpacity + { + get { return this.fillOpacity; } + set { this.Set(nameof(this.FillOpacity), ref this.fillOpacity, value); } + } + + /// + /// Gets or sets a value indicating whether the mesh is filled. + /// + [DataMember] + [DisplayName("Fill Visibility")] + [Description("Indicates whether the mesh is filled.")] + public bool FillVisible + { + get { return this.fillVisible; } + set { this.Set(nameof(this.FillVisible), ref this.fillVisible, value); } + } + + /// + public override void UpdateData() + { + var mesh = this.CurrentData; + var geometry = new MeshGeometry3D(); + geometry.Positions = new Point3DCollection(mesh.Vertices.Select(p => p.ToPoint3D())); + geometry.TriangleIndices = new Int32Collection(mesh.TriangleIndices.Select(i => (int)i)); + this.modelVisual.MeshGeometry = geometry; + + this.UpdateVisibility(); + } + + /// + public override void NotifyPropertyChanged(string propertyName) + { + if (propertyName == nameof(this.FillColor) || + propertyName == nameof(this.FillOpacity)) + { + this.UpdateMeshContents(); + } + else if (propertyName == nameof(this.Visible) || + propertyName == nameof(this.FillVisible)) + { + this.UpdateVisibility(); + } + } + + private void UpdateMeshContents() + { + double opacity = Math.Max(0, Math.Min(100, this.fillOpacity)); + var alphaColor = Color.FromArgb( + (byte)(opacity * 2.55), + this.fillColor.R, + this.fillColor.G, + this.fillColor.B); + var material = new DiffuseMaterial(new SolidColorBrush(alphaColor)); + this.modelVisual.Material = material; + this.modelVisual.BackMaterial = material; + } + + private void UpdateVisibility() + { + this.UpdateChildVisibility(this.modelVisual, this.Visible && this.FillVisible); + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows.csproj b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows.csproj new file mode 100644 index 000000000..9f648c23b --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows.csproj @@ -0,0 +1,47 @@ + + + + net472 + Microsoft.Psi.Spatial.Euclidean.Visualization + Provides visualization objects and adapters for various types defined in Microsoft.Psi.Spatial.Euclidean. 
+ + + + true + + ..\..\..\Build\Microsoft.Psi.ruleset + bin\Debug\net472\Microsoft.Psi.Spatial.Visualization.Windows.xml + + + + true + + ..\..\..\Build\Microsoft.Psi.ruleset + bin\Release\net472\Microsoft.Psi.Spatial.Visualization.Windows.xml + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/NumericalVoxelGridVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/NumericalVoxelGridVisualizationObject.cs new file mode 100644 index 000000000..49a92d6ff --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/NumericalVoxelGridVisualizationObject.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean.Visualization +{ + using System.ComponentModel; + using System.Runtime.Serialization; + using System.Windows.Media; + using HelixToolkit.Wpf; + using Microsoft.Psi.Spatial.Euclidean; + using Microsoft.Psi.Visualization.Extensions; + using Microsoft.Psi.Visualization.VisualizationObjects; + + /// + /// Implements a visualization object for a voxel grid of doubles. + /// + [VisualizationObject("Voxel grid.")] + public class NumericalVoxelGridVisualizationObject : VoxelGridVisualizationObject + { + private double threshold = 0; + private Color fillColor = Colors.Blue; + + /// + /// Gets or sets the value threshold defining which voxels are shown. + /// + [DataMember] + [DisplayName("Threshold")] + [Description("The threshold value below which the voxel is not shown.")] + public double Threshold + { + get { return this.threshold; } + set { this.Set(nameof(this.Threshold), ref this.threshold, value); } + } + + /// + /// Gets or sets the voxels fill color. + /// + [DataMember] + [DisplayName("Fill Color")] + [Description("The fill color of the voxels.")] + public Color FillColor + { + get { return this.fillColor; } + set { this.Set(nameof(this.FillColor), ref this.fillColor, value); } + } + + /// + public override void NotifyPropertyChanged(string propertyName) + { + if (propertyName == nameof(this.Threshold)) + { + this.UpdateData(); + } + + base.NotifyPropertyChanged(propertyName); + } + + /// + protected override void UpdateVoxelVisuals(BoxVisual3D voxelVisual3D, Voxel voxel) + { + voxelVisual3D.BeginEdit(); + + voxelVisual3D.Visible = this.GetVoxelVisibility(voxel); + + if (voxelVisual3D.Visible) + { + voxelVisual3D.Fill = new LinearGradientBrush(this.FillColor, Color.FromArgb(128, this.FillColor.R, this.FillColor.G, this.FillColor.B), 45.0d); + voxelVisual3D.Width = voxel.VoxelSize * 0.9d; + voxelVisual3D.Height = voxel.VoxelSize * 0.9d; + voxelVisual3D.Length = voxel.VoxelSize * 0.9d; + voxelVisual3D.Center = voxel.GetCenter().ToPoint3D(); + } + + voxelVisual3D.EndEdit(); + } + + private bool GetVoxelVisibility(Voxel voxel) + => voxel.Value > this.threshold; + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/PointCloud3DVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/PointCloud3DVisualizationObject.cs new file mode 100644 index 000000000..d7817f28b --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/PointCloud3DVisualizationObject.cs @@ -0,0 +1,118 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
+ +namespace Microsoft.Psi.Spatial.Euclidean.Visualization +{ + using System.ComponentModel; + using System.Linq; + using System.Runtime.Serialization; + using System.Windows.Media; + using System.Windows.Media.Media3D; + using HelixToolkit.Wpf; + using Microsoft.Psi.Spatial.Euclidean; + using Microsoft.Psi.Visualization.VisualizationObjects; + + /// + /// Implements a visualization object for . + /// + [VisualizationObject("Point Cloud 3D")] + public class PointCloud3DVisualizationObject : ModelVisual3DVisualizationObject + { + private readonly PointsVisual3D pointsVisual3D; + + private Color color = Colors.Gray; + private double pointSize = 1.0; + private string numberOfPoints = "N/A"; + + /// + /// Initializes a new instance of the class. + /// + public PointCloud3DVisualizationObject() + { + this.pointsVisual3D = new PointsVisual3D() + { + Color = this.color, + Size = this.pointSize, + }; + } + + /// + /// Gets or sets the point cloud color. + /// + [DataMember] + [DisplayName("Color")] + [Description("The color of the point cloud.")] + public Color Color + { + get { return this.color; } + set { this.Set(nameof(this.Color), ref this.color, value); } + } + + /// + /// Gets or sets the point size. + /// + [DataMember] + [DisplayName("Point Size")] + [Description("The size of a point in the cloud.")] + public double PointSize + { + get { return this.pointSize; } + set { this.Set(nameof(this.PointSize), ref this.pointSize, value); } + } + + /// + /// Gets the number of points in the point-cloud. + /// + [IgnoreDataMember] + [Browsable(true)] + [DisplayName("Number of points")] + [Description("The number of points in the current point cloud.")] + public string NumberOfPoints + { + get => this.numberOfPoints; + private set => this.Set(nameof(this.NumberOfPoints), ref this.numberOfPoints, value); + } + + /// + public override void UpdateData() + { + this.NumberOfPoints = this.CurrentData != null ? this.CurrentData.NumberOfPoints.ToString() : "N/A"; + this.UpdateVisuals(); + this.UpdateVisibility(); + } + + /// + public override void NotifyPropertyChanged(string propertyName) + { + if (propertyName == nameof(this.Color)) + { + this.pointsVisual3D.Color = this.Color; + } + else if (propertyName == nameof(this.PointSize)) + { + this.pointsVisual3D.Size = this.PointSize; + } + else if (propertyName == nameof(this.Visible)) + { + this.UpdateVisibility(); + } + } + + private void UpdateVisuals() + { + if (this.CurrentData == null) + { + this.pointsVisual3D.Points.Clear(); + } + else + { + this.pointsVisual3D.Points = new Point3DCollection(this.CurrentData.Select(p => new Point3D(p.X, p.Y, p.Z))); + } + } + + private void UpdateVisibility() + { + this.UpdateChildVisibility(this.pointsVisual3D, this.Visible && this.CurrentData != default); + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Rectangle3DListVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Rectangle3DListVisualizationObject.cs new file mode 100644 index 000000000..febf67896 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Rectangle3DListVisualizationObject.cs @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
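All of the visualization objects in this patch follow the same observable-property pattern: every display property funnels through Set(nameof(...), ref field, value), and NotifyPropertyChanged then dispatches to whichever update method the property affects (contents, line properties, or visibility). A minimal sketch of that pattern in isolation, assuming only an INotifyPropertyChanged-style base; the ObservableBase name and its exact semantics are assumptions, and the real base class in the visualization framework may differ:

using System.ComponentModel;

public abstract class ObservableBase : INotifyPropertyChanged
{
    public event PropertyChangedEventHandler PropertyChanged;

    // Assigns the backing field and raises PropertyChanged, mirroring the
    // this.Set(nameof(...), ref field, value) calls used by the properties above.
    protected void Set<T>(string propertyName, ref T field, T value)
    {
        if (!Equals(field, value))
        {
            field = value;
            this.PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
        }
    }
}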
+
+namespace Microsoft.Psi.Spatial.Euclidean.Visualization
+{
+    using Microsoft.Psi.Spatial.Euclidean;
+    using Microsoft.Psi.Visualization.VisualizationObjects;
+
+    /// <summary>
+    /// Implements a visualization object that can display lists of 3D rectangles.
+    /// </summary>
+    [VisualizationObject("3D Rectangles")]
+    public class Rectangle3DListVisualizationObject : ModelVisual3DListVisualizationObject<Rectangle3DVisualizationObject, Rectangle3D?>
+    {
+    }
+}
diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Rectangle3DVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Rectangle3DVisualizationObject.cs
new file mode 100644
index 000000000..744a6f7f9
--- /dev/null
+++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/Rectangle3DVisualizationObject.cs
@@ -0,0 +1,277 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT license.
+
+namespace Microsoft.Psi.Spatial.Euclidean.Visualization
+{
+    using System;
+    using System.ComponentModel;
+    using System.Runtime.Serialization;
+    using System.Windows.Media;
+    using System.Windows.Media.Media3D;
+    using HelixToolkit.Wpf;
+    using Microsoft.Psi.Spatial.Euclidean;
+    using Microsoft.Psi.Visualization.Extensions;
+    using Microsoft.Psi.Visualization.VisualizationObjects;
+
+    /// <summary>
+    /// Implements a 3D rectangle visualization object.
+    /// </summary>
+    [VisualizationObject("3D Rectangle")]
+    public class Rectangle3DVisualizationObject : ModelVisual3DVisualizationObject<Rectangle3D?>
+    {
+        private readonly MeshGeometryVisual3D modelVisual;
+        private readonly PipeVisual3D[] borderEdges;
+
+        // Fill properties
+        private Color fillColor = Colors.DodgerBlue;
+        private double fillOpacity = 100;
+        private bool fillVisible = true;
+
+        // Border properties
+        private Color borderColor = Colors.White;
+        private double borderThicknessMm = 15;
+        private double borderOpacity = 100;
+        private bool borderVisible = true;
+        private int pipeDiv = 7;
+
+        ///
+        /// Initializes a new instance of the class.
+ /// + public Rectangle3DVisualizationObject() + { + // Create the fill mesh + this.modelVisual = new MeshGeometryVisual3D + { + MeshGeometry = new MeshGeometry3D(), + }; + + this.modelVisual.MeshGeometry.Positions.Add(default); + this.modelVisual.MeshGeometry.Positions.Add(default); + this.modelVisual.MeshGeometry.Positions.Add(default); + this.modelVisual.MeshGeometry.Positions.Add(default); + this.modelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(0, 0)); + this.modelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(1, 0)); + this.modelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(1, 1)); + this.modelVisual.MeshGeometry.TextureCoordinates.Add(new System.Windows.Point(0, 1)); + + this.modelVisual.MeshGeometry.TriangleIndices.Add(0); + this.modelVisual.MeshGeometry.TriangleIndices.Add(1); + this.modelVisual.MeshGeometry.TriangleIndices.Add(2); + + this.modelVisual.MeshGeometry.TriangleIndices.Add(0); + this.modelVisual.MeshGeometry.TriangleIndices.Add(2); + this.modelVisual.MeshGeometry.TriangleIndices.Add(3); + + this.modelVisual.MeshGeometry.TriangleIndices.Add(0); + this.modelVisual.MeshGeometry.TriangleIndices.Add(3); + this.modelVisual.MeshGeometry.TriangleIndices.Add(2); + + this.modelVisual.MeshGeometry.TriangleIndices.Add(0); + this.modelVisual.MeshGeometry.TriangleIndices.Add(2); + this.modelVisual.MeshGeometry.TriangleIndices.Add(1); + + // Create the border lines + this.borderEdges = new PipeVisual3D[4]; + for (int i = 0; i < this.borderEdges.Length; i++) + { + this.borderEdges[i] = new PipeVisual3D(); + } + + // Set the color, thickness, opacity + this.UpdateLineProperties(); + + this.UpdateVisibility(); + } + + /// + /// Gets or sets the fill color. + /// + [DataMember] + [DisplayName("Fill Color")] + [Description("The fill color of the rectangle.")] + public Color FillColor + { + get { return this.fillColor; } + set { this.Set(nameof(this.FillColor), ref this.fillColor, value); } + } + + /// + /// Gets or sets the fill opacity. + /// + [DataMember] + [DisplayName("Fill Opacity")] + [Description("The fill opacity of the rectangle.")] + public double FillOpacity + { + get { return this.fillOpacity; } + set { this.Set(nameof(this.FillOpacity), ref this.fillOpacity, value); } + } + + /// + /// Gets or sets a value indicating whether the rectangle is filled. + /// + [DataMember] + [DisplayName("Fill Visibility")] + [Description("Indicates whether the rectangle is filled.")] + public bool FillVisible + { + get { return this.fillVisible; } + set { this.Set(nameof(this.FillVisible), ref this.fillVisible, value); } + } + + /// + /// Gets or sets the border color. + /// + [DataMember] + [DisplayName("Border Color")] + [Description("The color of the rectangle border.")] + public Color BorderColor + { + get { return this.borderColor; } + set { this.Set(nameof(this.BorderColor), ref this.borderColor, value); } + } + + /// + /// Gets or sets the border thickness. + /// + [DataMember] + [DisplayName("Border Thickness (mm)")] + [Description("The thickness of the rectangle border in millimeters.")] + public double BorderThicknessMm + { + get { return this.borderThicknessMm; } + set { this.Set(nameof(this.BorderThicknessMm), ref this.borderThicknessMm, value); } + } + + /// + /// Gets or sets the border opacity. 
+ /// + [DataMember] + [DisplayName("Border Opacity")] + [Description("The opacity of the rectangle border.")] + public double BorderOpacity + { + get { return this.borderOpacity; } + set { this.Set(nameof(this.BorderOpacity), ref this.borderOpacity, value); } + } + + /// + /// Gets or sets a value indicating whether the border is shown. + /// + [DataMember] + [DisplayName("Border Visibility")] + [Description("Indicates whether the rectangle border is shown.")] + public bool BorderVisible + { + get { return this.borderVisible; } + set { this.Set(nameof(this.BorderVisible), ref this.borderVisible, value); } + } + + /// + /// Gets or sets the number of divisions to use when rendering each edge as a pipe. + /// + [DataMember] + [DisplayName("Pipe Divisions")] + [Description("Number of divisions to use when rendering each rectangle edge as a pipe (minimum value is 3).")] + public int PipeDivisions + { + get { return this.pipeDiv; } + set { this.Set(nameof(this.PipeDivisions), ref this.pipeDiv, value < 3 ? 3 : value); } + } + + /// + public override void UpdateData() + { + if (this.CurrentData.HasValue && !this.CurrentData.Value.IsDegenerate) + { + var rectangle = this.CurrentData.Value; + var topLeftPoint3D = rectangle.TopLeft.ToPoint3D(); + var topRightPoint3D = rectangle.TopRight.ToPoint3D(); + var bottomRightPoint3D = rectangle.BottomRight.ToPoint3D(); + var bottomLeftPoint3D = rectangle.BottomLeft.ToPoint3D(); + + this.modelVisual.MeshGeometry.Positions[0] = topLeftPoint3D; + this.modelVisual.MeshGeometry.Positions[1] = topRightPoint3D; + this.modelVisual.MeshGeometry.Positions[2] = bottomRightPoint3D; + this.modelVisual.MeshGeometry.Positions[3] = bottomLeftPoint3D; + + this.UpdateLinePosition(this.borderEdges[0], topLeftPoint3D, topRightPoint3D); + this.UpdateLinePosition(this.borderEdges[1], topRightPoint3D, bottomRightPoint3D); + this.UpdateLinePosition(this.borderEdges[2], bottomRightPoint3D, bottomLeftPoint3D); + this.UpdateLinePosition(this.borderEdges[3], bottomLeftPoint3D, topLeftPoint3D); + } + + this.UpdateVisibility(); + } + + /// + public override void NotifyPropertyChanged(string propertyName) + { + if (propertyName == nameof(this.FillColor) || + propertyName == nameof(this.FillOpacity)) + { + this.UpdateRectangleContents(); + } + else if (propertyName == nameof(this.BorderColor) || + propertyName == nameof(this.BorderOpacity) || + propertyName == nameof(this.BorderThicknessMm) || + propertyName == nameof(this.PipeDivisions)) + { + this.UpdateLineProperties(); + } + else if (propertyName == nameof(this.Visible) || + propertyName == nameof(this.FillVisible) || + propertyName == nameof(this.BorderVisible)) + { + this.UpdateVisibility(); + } + } + + private void UpdateRectangleContents() + { + double opacity = Math.Max(0, Math.Min(100, this.fillOpacity)); + var alphaColor = Color.FromArgb( + (byte)(opacity * 2.55), + this.fillColor.R, + this.fillColor.G, + this.fillColor.B); + var material = new DiffuseMaterial(new SolidColorBrush(alphaColor)); + this.modelVisual.Material = material; + this.modelVisual.BackMaterial = material; + } + + private void UpdateLinePosition(Visual3D visual, Point3D point1, Point3D point2) + { + PipeVisual3D line = visual as PipeVisual3D; + line.Point1 = point1; + line.Point2 = point2; + } + + private void UpdateLineProperties() + { + double opacity = Math.Max(0, Math.Min(100, this.borderOpacity)); + var alphaColor = Color.FromArgb( + (byte)(opacity * 2.55), + this.borderColor.R, + this.borderColor.G, + this.borderColor.B); + + foreach (PipeVisual3D line 
in this.borderEdges) + { + line.Diameter = this.borderThicknessMm / 1000.0; + line.Fill = new SolidColorBrush(alphaColor); + line.ThetaDiv = this.pipeDiv; + } + } + + private void UpdateVisibility() + { + this.UpdateChildVisibility(this.modelVisual, this.Visible && this.FillVisible && this.CurrentData.HasValue && !this.CurrentData.Value.IsDegenerate); + + foreach (var line in this.borderEdges) + { + this.UpdateChildVisibility(line, this.Visible && this.BorderVisible && this.CurrentData.HasValue && !this.CurrentData.Value.IsDegenerate); + } + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/VoxelGridVisualizationObject.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/VoxelGridVisualizationObject.cs new file mode 100644 index 000000000..0c353f0b0 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/VoxelGridVisualizationObject.cs @@ -0,0 +1,82 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean.Visualization +{ + using System.ComponentModel; + using System.Linq; + using System.Runtime.Serialization; + using System.Windows.Media.Media3D; + using Microsoft.Psi.Spatial.Euclidean; + using Microsoft.Psi.Visualization.VisualizationObjects; + + /// + /// Implements a base visualization object for a voxel grid. + /// + /// The type of the voxel visual. + /// The type of data contained in the voxel. + public abstract class VoxelGridVisualizationObject : ModelVisual3DVisualizationObject> + where TVoxelVisual3D : Visual3D, new() + { + /// + /// Initializes a new instance of the class. + /// + public VoxelGridVisualizationObject() + { + this.VoxelVisuals = new (null); + this.UpdateVisibility(); + } + + /// + /// Gets the number of voxels. + /// + [DataMember] + [DisplayName("Voxel Count")] + [Description("The number of voxels.")] + public int VoxelCount => this.CurrentData != null ? this.CurrentData.Count() : 0; + + /// + /// Gets the visual nodes. + /// + protected UpdatableVisual3DDictionary<(int, int, int), TVoxelVisual3D> VoxelVisuals { get; } + + /// + public override void UpdateData() + { + this.VoxelVisuals.BeginUpdate(); + + if (this.CurrentData != null) + { + // update the joints + foreach (var voxel in this.CurrentData) + { + var nodeVisualizationObject = this.VoxelVisuals[voxel.Index]; + this.UpdateVoxelVisuals(nodeVisualizationObject, voxel); + } + } + + this.VoxelVisuals.EndUpdate(); + + this.UpdateVisibility(); + } + + /// + public override void NotifyPropertyChanged(string propertyName) + { + if (propertyName == nameof(this.Visible)) + { + this.UpdateVisibility(); + } + } + + /// + /// Provides an abstract method for updating voxel visualization. + /// + /// The voxel visual to update. + /// The voxel. 
+ protected abstract void UpdateVoxelVisuals(TVoxelVisual3D voxelVisual3D, Voxel voxel); + + private void UpdateVisibility() + => this.UpdateChildVisibility(this.VoxelVisuals, this.Visible && this.CurrentData != null); + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/stylecop.json b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/stylecop.json new file mode 100644 index 000000000..6f09427eb --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean.Visualization.Windows/stylecop.json @@ -0,0 +1,16 @@ +{ + // ACTION REQUIRED: This file was automatically added to your project, but it + // will not take effect until additional steps are taken to enable it. See the + // following page for additional information: + // + // https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/EnableConfiguration.md + + "$schema": "https://raw.githubusercontent.com/DotNetAnalyzers/StyleCopAnalyzers/master/StyleCop.Analyzers/StyleCop.Analyzers/Settings/stylecop.schema.json", + "settings": { + "documentationRules": { + "companyName": "Microsoft Corporation", + "copyrightText": "Copyright (c) Microsoft Corporation. All rights reserved.\nLicensed under the MIT license.", + "xmlHeader": false + } + } +} \ No newline at end of file diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/AngularVelocity3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/AngularVelocity3D.cs new file mode 100644 index 000000000..879a9cb6d --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/AngularVelocity3D.cs @@ -0,0 +1,131 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System; + using MathNet.Numerics.LinearAlgebra; + using MathNet.Spatial.Euclidean; + using static Microsoft.Psi.Calibration.CalibrationExtensions; + + /// + /// Represents an angular 3D velocity, starting from an original rotation, + /// around a particular axis of rotation. + /// + public readonly struct AngularVelocity3D : IEquatable + { + /// + /// The origin of rotation. + /// + public readonly Matrix OriginRotation; + + /// + /// The axis of angular velocity, along with the radians/time speed (length of the vector). + /// + public readonly Vector3D AxisAngleVector; + + /// + /// Initializes a new instance of the struct. + /// + /// The origin of rotation. + /// The axis-angle representation of velocity. + public AngularVelocity3D(Matrix originRotation, Vector3D axisAngleVector) + { + if (originRotation.RowCount != 3 || + originRotation.ColumnCount != 3) + { + throw new ArgumentException("Rotation matrix must be 3x3."); + } + + this.OriginRotation = originRotation; + this.AxisAngleVector = axisAngleVector; + } + + /// + /// Initializes a new instance of the struct. + /// + /// The origin of rotation. + /// The axis of velocity. + /// The angular speed (radians/time). + public AngularVelocity3D(Matrix originRotation, UnitVector3D axis, double speed) + : this(originRotation, axis.ScaleBy(speed)) + { + } + + /// + /// Initializes a new instance of the struct. + /// + /// The origin of rotation. + /// A destination rotation. + /// The time it took to reach the destination rotation. 
+ public AngularVelocity3D(Matrix originRotation, Matrix destinationRotation, TimeSpan time) + { + if (originRotation.RowCount != 3 || + originRotation.ColumnCount != 3 || + destinationRotation.RowCount != 3 || + destinationRotation.ColumnCount != 3) + { + throw new ArgumentException("Rotation matrices must be 3x3."); + } + + this.OriginRotation = originRotation; + var axisAngleDistance = Vector3D.OfVector(MatrixToAxisAngle(destinationRotation * originRotation.Inverse())); + var angularSpeed = axisAngleDistance.Length / time.TotalSeconds; + this.AxisAngleVector = angularSpeed == 0 ? default : axisAngleDistance.Normalize().ScaleBy(angularSpeed); + } + + /// + /// Initializes a new instance of the struct. + /// + /// The origin coordinate system. + /// The destination coordinate system. + /// The time it took to reach the destination coordinate system. + public AngularVelocity3D(CoordinateSystem originCoordinateSystem, CoordinateSystem destinationCoordinateSystem, TimeSpan time) + : this(originCoordinateSystem.GetRotationSubMatrix(), destinationCoordinateSystem.GetRotationSubMatrix(), time) + { + } + + /// + /// Gets the magnitude of the velocity. + /// + public double Speed => this.AxisAngleVector.Length; + + /// + /// Returns a value indicating whether the specified velocities are the same. + /// + /// The first velocity. + /// The second velocity. + /// True if the velocities are the same; otherwise false. + public static bool operator ==(AngularVelocity3D left, AngularVelocity3D right) => left.Equals(right); + + /// + /// Returns a value indicating whether the specified velocities are different. + /// + /// The first velocity. + /// The second velocity. + /// True if the velocities are different; otherwise false. + public static bool operator !=(AngularVelocity3D left, AngularVelocity3D right) => !left.Equals(right); + + /// + public bool Equals(AngularVelocity3D other) => this.OriginRotation.Equals(other.OriginRotation) && this.AxisAngleVector == other.AxisAngleVector; + + /// + public override bool Equals(object obj) => obj is AngularVelocity3D other && this.Equals(other); + + /// + public override int GetHashCode() => HashCode.Combine(this.OriginRotation, this.AxisAngleVector); + + /// + /// Computes the destination rotation, if this velocity is followed for a given amount of time. + /// + /// The span of time to compute over. + /// The destination rotation. + /// The unit of time should be the same as assumed for the axis-angle velocity vector (e.g., seconds). + public Matrix ComputeDestination(double time) + { + var angularDistance = this.AxisAngleVector.Length * time; + var axisAngleDistance = this.AxisAngleVector.Normalize().ScaleBy(angularDistance); + return AxisAngleToMatrix(axisAngleDistance.ToVector()) * this.OriginRotation; + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Bounds3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Bounds3D.cs new file mode 100644 index 000000000..b4b89e203 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Bounds3D.cs @@ -0,0 +1,160 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System; + using System.Collections.Generic; + using MathNet.Spatial.Euclidean; + + /// + /// Represents a 3-dimensional bounding box. + /// + public readonly struct Bounds3D : IEquatable + { + /// + /// The offset of the first diagonal corner of the bounds. 
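The two-rotation constructor above defines the velocity as the axis-angle form of the relative rotation destination * origin.Inverse(), scaled by 1 / time, and ComputeDestination integrates that axis-angle vector back into a rotation. A minimal round-trip sketch under those definitions; the RotationAboutZ helper and the numeric values are illustrative:

using System;
using MathNet.Numerics.LinearAlgebra;
using Microsoft.Psi.Spatial.Euclidean;

// A rotation about the z-axis by the given angle (radians), as a 3x3 matrix.
static Matrix<double> RotationAboutZ(double angle) =>
    Matrix<double>.Build.DenseOfArray(new[,]
    {
        { Math.Cos(angle), -Math.Sin(angle), 0.0 },
        { Math.Sin(angle), Math.Cos(angle), 0.0 },
        { 0.0, 0.0, 1.0 },
    });

var origin = RotationAboutZ(0);
var destination = RotationAboutZ(Math.PI / 2);

// 90 degrees of rotation in one second: the axis-angle vector points along +z
// and its length (the Speed property) is pi/2 radians per second.
var velocity = new AngularVelocity3D(origin, destination, TimeSpan.FromSeconds(1));

// Integrating the velocity for one second (the same time unit used to construct
// it) recovers the destination rotation, up to numerical precision.
var recovered = velocity.ComputeDestination(1.0);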
+ ///
+ public readonly Vector3D Min;
+
+ ///
+ /// The offset of the second diagonal corner of the bounds.
+ ///
+ public readonly Vector3D Max;
+
+ ///
+ /// Initializes a new instance of the struct.
+ ///
+ /// The x-offset of the first diagonal corner of the bounds.
+ /// The x-offset of the second diagonal corner of the bounds.
+ /// The y-offset of the first diagonal corner of the bounds.
+ /// The y-offset of the second diagonal corner of the bounds.
+ /// The z-offset of the first diagonal corner of the bounds.
+ /// The z-offset of the second diagonal corner of the bounds.
+ ///
+ /// The 3D bounds are defined by two opposite corners whose offsets are specified relative to the origin.
+ ///
+ public Bounds3D(double x1, double x2, double y1, double y2, double z1, double z2)
+ {
+ this.Min = new Vector3D(Math.Min(x1, x2), Math.Min(y1, y2), Math.Min(z1, z2));
+ this.Max = new Vector3D(Math.Max(x1, x2), Math.Max(y1, y2), Math.Max(z1, z2));
+ }
+
+ ///
+ /// Gets the size of the bounds along the x-axis.
+ ///
+ public double SizeX => this.Max.X - this.Min.X;
+
+ ///
+ /// Gets the size of the bounds along the y-axis.
+ ///
+ public double SizeY => this.Max.Y - this.Min.Y;
+
+ ///
+ /// Gets the size of the bounds along the z-axis.
+ ///
+ public double SizeZ => this.Max.Z - this.Min.Z;
+
+ ///
+ /// Gets the geometric center of the bounds.
+ ///
+ public Point3D Center => Point3D.MidPoint(this.Min.ToPoint3D(), this.Max.ToPoint3D());
+
+ ///
+ /// Gets a value indicating whether the bounds are degenerate
+ /// (i.e. size zero in one or more dimensions).
+ ///
+ public bool IsDegenerate
+ {
+ get
+ {
+ return
+ this.Min.X == this.Max.X ||
+ this.Min.Y == this.Max.Y ||
+ this.Min.Z == this.Max.Z;
+ }
+ }
+
+ ///
+ /// Returns a value indicating whether the specified bounds are the same.
+ ///
+ /// The first bounds.
+ /// The second bounds.
+ /// True if the bounds are the same; otherwise false.
+ public static bool operator ==(Bounds3D left, Bounds3D right)
+ {
+ return left.Equals(right);
+ }
+
+ ///
+ /// Returns a value indicating whether the specified bounds are different.
+ ///
+ /// The first bounds.
+ /// The second bounds.
+ /// True if the bounds are different; otherwise false.
+ public static bool operator !=(Bounds3D left, Bounds3D right)
+ {
+ return !left.Equals(right);
+ }
+
+ ///
+ /// Gets the corner points for the bounds.
+ ///
+ /// An enumeration of corner points for the bounds.
+ public IEnumerable GetCorners()
+ {
+ // Corner indices (0 = Max, 7 = Min)
+ //   2------3
+ //  /|     /|
+ // 0-+----1 |
+ // | |    | |
+ // | 6----+-7
+ // |/     |/
+ // 4------5
+ yield return new Point3D(this.Max.X, this.Max.Y, this.Max.Z);
+ yield return new Point3D(this.Max.X, this.Min.Y, this.Max.Z);
+ yield return new Point3D(this.Min.X, this.Max.Y, this.Max.Z);
+ yield return new Point3D(this.Min.X, this.Min.Y, this.Max.Z);
+ yield return new Point3D(this.Max.X, this.Max.Y, this.Min.Z);
+ yield return new Point3D(this.Max.X, this.Min.Y, this.Min.Z);
+ yield return new Point3D(this.Min.X, this.Max.Y, this.Min.Z);
+ yield return new Point3D(this.Min.X, this.Min.Y, this.Min.Z);
+ }
+
+ ///
+ /// Determines whether the bounds contain a point.
+ ///
+ /// The point to check.
+ /// True if the bounds contain the point, otherwise false.
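// Usage sketch (illustrative only): the constructor normalizes the two corners into Min/Max,
// so the order of each coordinate pair does not matter.
var bounds = new Bounds3D(1, 0, 2, 0, 3, 0);
var sizeZ = bounds.SizeZ;                                 // 3
var containsCenter = bounds.ContainsPoint(bounds.Center); // true: center is (0.5, 1, 1.5)
var isDegenerate = bounds.IsDegenerate;                   // false: non-zero size along every axis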
+ public bool ContainsPoint(Point3D point) + { + return + point.X >= this.Min.X && + point.X <= this.Max.X && + point.Y >= this.Min.Y && + point.Y <= this.Max.Y && + point.Z >= this.Min.Z && + point.Z <= this.Max.Z; + } + + /// + public bool Equals(Bounds3D other) + { + return + this.Min == other.Min && + this.Max == other.Max; + } + + /// + public override bool Equals(object obj) + { + return obj is Bounds3D other && this.Equals(other); + } + + /// + public override int GetHashCode() + { + return HashCode.Combine(this.Min, this.Max); + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Box3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Box3D.cs new file mode 100644 index 000000000..0afdae7e6 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Box3D.cs @@ -0,0 +1,363 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System; + using System.Collections.Generic; + using System.Linq; + using MathNet.Spatial.Euclidean; + + /// + /// An enumeration of the facets for a 3D box. + /// + public enum Box3DFacet + { + /// + /// The facet that lies on the plane having the minimum value along the x-axis. + /// + MinX, + + /// + /// The facet that lies on the plane having the maximum value along the x-axis. + /// + MaxX, + + /// + /// The facet that lies on the plane having the minimum value along the y-axis. + /// + MinY, + + /// + /// The facet that lies on the plane having the maximum value along the y-axis. + /// + MaxY, + + /// + /// The facet that lies on the plane having the minimum value along the z-axis. + /// + MinZ, + + /// + /// The facet that lies on the plane having the maximum value along the z-axis. + /// + MaxZ, + } + + /// + /// Represents a 3D rectangular box. + /// + public readonly struct Box3D : IEquatable + { + /// + /// The pose of the box. + /// + public readonly CoordinateSystem Pose; + + /// + /// The bounds for the box. + /// + public readonly Bounds3D Bounds; + + /// + /// Initializes a new instance of the struct. + /// + /// The x-offset of the first diagonal corner of the box relative to its origin. + /// The x-offset of the second diagonal corner of the box relative to its origin. + /// The y-offset of the first diagonal corner of the box relative to its origin. + /// The y-offset of the second diagonal corner of the box relative to its origin. + /// The z-offset of the first diagonal corner of the box relative to its origin. + /// The z-offset of the second diagonal corner of the box relative to its origin. + /// An optional pose for the box (by default, at origin). + /// + /// The box is defined by two opposite corners whose offsets are specified relative to its origin. + /// The edges of the box are aligned to the x, y and z axes of its coordinate system. + /// + public Box3D(double x1, double x2, double y1, double y2, double z1, double z2, CoordinateSystem pose = null) + : this(new Bounds3D(x1, x2, y1, y2, z1, z2), pose) + { + } + + /// + /// Initializes a new instance of the struct. + /// + /// The bounds for the box. + /// An optional pose for the box (by default, at origin). + public Box3D(Bounds3D bounds, CoordinateSystem pose = null) + { + this.Pose = pose ?? new CoordinateSystem(); + this.Bounds = bounds; + } + + /// + /// Initializes a new instance of the struct. + /// + /// The origin of the rectangle. + /// The x-axis of the rectangle. + /// The y-axis of the rectangle. + /// The z-axis of the rectangle. 
+ /// The x-offset of the first diagonal corner of the box relative to its origin. + /// The x-offset of the second diagonal corner of the box relative to its origin. + /// The y-offset of the first diagonal corner of the box relative to its origin. + /// The y-offset of the second diagonal corner of the box relative to its origin. + /// The z-offset of the first diagonal corner of the box relative to its origin. + /// The z-offset of the second diagonal corner of the box relative to its origin. + /// + /// The box is defined by two opposite corners whose offsets are specified relative to its origin. + /// The edges of the box are aligned to the specified x, y and z axes. + /// + [System.Diagnostics.CodeAnalysis.SuppressMessage("StyleCop.CSharp.NamingRules", "SA1305:Field names should not use Hungarian notation", Justification = "Allow use of param names xAxis, yAxis and zAxis.")] + public Box3D(Point3D origin, UnitVector3D xAxis, UnitVector3D yAxis, UnitVector3D zAxis, double x1, double x2, double y1, double y2, double z1, double z2) + : this(x1, x2, y1, y2, z1, z2, new CoordinateSystem(origin, xAxis, yAxis, zAxis)) + { + } + + /// + /// Gets the origin of the box. + /// + public Point3D Origin => this.Pose.Origin; + + /// + /// Gets the x-axis of the box. + /// + public UnitVector3D XAxis => this.Pose.XAxis.Normalize(); + + /// + /// Gets the y-axis of the box. + /// + public UnitVector3D YAxis => this.Pose.YAxis.Normalize(); + + /// + /// Gets the z-axis of the box. + /// + public UnitVector3D ZAxis => this.Pose.ZAxis.Normalize(); + + /// + /// Gets the geometric center of the box. + /// + public Point3D Center => this.Bounds.Center.TransformBy(this.Pose); + + /// + /// Gets the length of the box along its x-axis. + /// + public double LengthX => this.Bounds.SizeX; + + /// + /// Gets the length of the box along its y-axis. + /// + public double LengthY => this.Bounds.SizeY; + + /// + /// Gets the length of the box along its z-axis. + /// + public double LengthZ => this.Bounds.SizeZ; + + /// + /// Gets a value indicating whether the box is degenerate + /// (i.e. one or more of its edges has zero length). + /// + public bool IsDegenerate => this.Bounds.IsDegenerate; + + /// + /// Returns a value indicating whether the specified boxes are the same. + /// + /// The first box. + /// The second box. + /// True if the boxes are the same; otherwise false. + public static bool operator ==(Box3D left, Box3D right) => left.Equals(right); + + /// + /// Returns a value indicating whether the specified boxes are different. + /// + /// The first box. + /// The second box. + /// True if the boxes are different; otherwise false. + public static bool operator !=(Box3D left, Box3D right) => !left.Equals(right); + + /// + /// Gets the corner points of the box. + /// + /// An enumeration containing the corner points for the box. + public IEnumerable GetCorners() + { + var pose = this.Pose; + return this.Bounds.GetCorners().Select(p => p.TransformBy(pose)); + } + + /// + /// Determines whether the contains a point. + /// + /// The point to check. + /// True if this contains the point, otherwise false. + public bool ContainsPoint(Point3D point) => + this.Bounds.ContainsPoint(point.TransformBy(this.Pose.Invert())); + + /// + /// Computes the intersection point between a 3D ray and this box. + /// + /// The 3D ray. + /// The intersection point, if one exists. + public Point3D? IntersectionWith(Ray3D ray3D) + { + // If the box is degenerate, return. + // TODO: Better handle degenerate (e.g. 
planar) cases + if (this.IsDegenerate) + { + return null; + } + + var results = new List(); + + // Compute the intersection points with each face of the box + foreach (Box3DFacet facet in Enum.GetValues(typeof(Box3DFacet))) + { + var point3D = this.GetFacet(facet).IntersectionWith(ray3D); + if (point3D.HasValue) + { + results.Add(point3D.Value); + } + } + + // If there are any intersection points, get the closest one + return results.Any() ? results.OrderBy(p => p.DistanceTo(ray3D.ThroughPoint)).First() : null; + } + + /// + public bool Equals(Box3D other) => + this.Pose == other.Pose && this.Bounds == other.Bounds; + + /// + public override bool Equals(object obj) => + obj is Box3D other && this.Equals(other); + + /// + public override int GetHashCode() => + HashCode.Combine(this.Pose, this.Bounds); + + /// + /// Returns the facets for the box. + /// + /// An enumeration containing the facets for the box. + public IEnumerable GetFacets() + { + yield return this.GetFacet(Box3DFacet.MinX); + yield return this.GetFacet(Box3DFacet.MaxX); + yield return this.GetFacet(Box3DFacet.MinY); + yield return this.GetFacet(Box3DFacet.MaxY); + yield return this.GetFacet(Box3DFacet.MinZ); + yield return this.GetFacet(Box3DFacet.MaxZ); + } + + /// + /// Returns a representing the specified facet of this . + /// + /// The requested facet. + /// The representing the requested facet. + public Rectangle3D GetFacet(Box3DFacet facet) + { + return facet switch + { + // MinX Facet: + // Origin at lower-left corner (looking at the face from outside the box along the +X axis). + // Width axis points in -Y direction, and height axis points in +Z direction. + Box3DFacet.MinX => + new Rectangle3D( + new Point3D(this.Bounds.Min.X, this.Bounds.Max.Y, this.Bounds.Min.Z).TransformBy(this.Pose), + this.YAxis.Negate(), + this.ZAxis, + 0, + 0, + this.LengthY, + this.LengthZ), + + // MaxX Facet: + // Origin at lower-left corner (looking at the face from outside the box along the -X axis). + // Width axis points in +Y direction, and height axis points in +Z direction. + Box3DFacet.MaxX => + new Rectangle3D( + new Point3D(this.Bounds.Max.X, this.Bounds.Min.Y, this.Bounds.Min.Z).TransformBy(this.Pose), + this.YAxis, + this.ZAxis, + 0, + 0, + this.LengthY, + this.LengthZ), + + // MinY Facet: + // Origin at lower-left corner (looking at the face from outside the box along the +Y axis). + // Width axis points in +X direction, and height axis points in +Z direction. + Box3DFacet.MinY => + new Rectangle3D( + new Point3D(this.Bounds.Min.X, this.Bounds.Min.Y, this.Bounds.Min.Z).TransformBy(this.Pose), + this.XAxis, + this.ZAxis, + 0, + 0, + this.LengthX, + this.LengthZ), + + // MaxY Facet: + // Origin at lower-left corner (looking at the face from outside the box along the -Y axis). + // Width axis points in -X direction, and height axis points in +Z direction. + Box3DFacet.MaxY => + new Rectangle3D( + new Point3D(this.Bounds.Max.X, this.Bounds.Max.Y, this.Bounds.Min.Z).TransformBy(this.Pose), + this.XAxis.Negate(), + this.ZAxis, + 0, + 0, + this.LengthX, + this.LengthZ), + + // MinZ Facet: + // Origin at lower-left corner (looking at the face from outside the box along the +Z axis). + // Width axis points in -X direction, and height axis points in +Y direction. 
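// Usage sketch (illustrative only): intersecting a ray with an axis-aligned unit box posed at the
// origin; the closest facet crossing to the ray's through-point is returned.
var unitBox = new Box3D(0, 1, 0, 1, 0, 1);
var ray = new Ray3D(new Point3D(0.5, 0.5, -1), UnitVector3D.ZAxis);
var intersection = unitBox.IntersectionWith(ray); // expected (0.5, 0.5, 0), on the MinZ facet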
+ Box3DFacet.MinZ => + new Rectangle3D( + new Point3D(this.Bounds.Max.X, this.Bounds.Min.Y, this.Bounds.Min.Z).TransformBy(this.Pose), + this.XAxis.Negate(), + this.YAxis, + 0, + 0, + this.LengthX, + this.LengthY), + + // MaxZ Facet: + // Origin at lower-left corner (looking at the face from outside the box along the -Z axis). + // Width axis points in +X direction, and height axis points in +Y direction. + Box3DFacet.MaxZ => + new Rectangle3D( + new Point3D(this.Bounds.Min.X, this.Bounds.Min.Y, this.Bounds.Max.Z).TransformBy(this.Pose), + this.XAxis, + this.YAxis, + 0, + 0, + this.LengthX, + this.LengthY), + + _ => throw new ArgumentException(nameof(facet)), + }; + } + + /// + /// Transforms the box by a coordinate system. + /// + /// The coordinate system to transform the box by. + /// The transformed box. + public Box3D TransformBy(CoordinateSystem coordinateSystem) + => new (this.Bounds, this.Pose.TransformBy(coordinateSystem)); + + /// + /// Convert to canonical form with origin in the center. + /// + /// in with origin in the center. + public Box3D ToCenteredBox3D() + { + var r = (this.Bounds.Min - this.Bounds.Max) / 2; + var bounds = new Bounds3D(-r.X, r.X, -r.Y, r.Y, -r.Z, r.Z); + var translate = CoordinateSystem.Translation(this.Origin.VectorTo(this.Center)); + var pose = this.Pose.TransformBy(translate); + return new Box3D(bounds, pose); + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/CoordinateSystemVelocity3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/CoordinateSystemVelocity3D.cs new file mode 100644 index 000000000..7077baf80 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/CoordinateSystemVelocity3D.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System; + using MathNet.Spatial.Euclidean; + using static Microsoft.Psi.Calibration.CalibrationExtensions; + + /// + /// Represents a coordinate system velocity from a particular starting pose, + /// composing both a linear and angular velocity. + /// + public readonly struct CoordinateSystemVelocity3D : IEquatable + { + /// + /// The origin coordinate system. + /// + public readonly CoordinateSystem OriginCoordinateSystem; + + /// + /// The axis of angular velocity, along with the radians/time speed (length of the vector). + /// + public readonly Vector3D AxisAngleVector; + + /// + /// The linear velocity vector. Describes the direction of motion as well as the speed (length of the vector). + /// + public readonly Vector3D LinearVector; + + /// + /// Initializes a new instance of the struct. + /// + /// The origin coordinate system. + /// The axis-angle representation of angular velocity. + /// The linear velocity vector. + public CoordinateSystemVelocity3D( + CoordinateSystem originCoordinateSystem, + Vector3D axisAngleVector, + Vector3D linearVector) + { + this.OriginCoordinateSystem = originCoordinateSystem; + this.AxisAngleVector = axisAngleVector; + this.LinearVector = linearVector; + } + + /// + /// Initializes a new instance of the struct. + /// + /// The origin coordinate system. + /// The axis of angular velocity. + /// The angular speed around the axis. + /// The direction of linear velocity. + /// The linear speed. 
+ public CoordinateSystemVelocity3D( + CoordinateSystem originCoordinateSystem, + UnitVector3D angularAxis, + double angularSpeed, + UnitVector3D linearDirection, + double linearSpeed) + : this(originCoordinateSystem, angularAxis.ScaleBy(angularSpeed), linearDirection.ScaleBy(linearSpeed)) + { + } + + /// + /// Initializes a new instance of the struct. + /// + /// The origin coordinate system. + /// A destination coordinate system. + /// The time it took to reach the destination coordinate system. + public CoordinateSystemVelocity3D( + CoordinateSystem originCoordinateSystem, + CoordinateSystem destinationCoordinateSystem, + TimeSpan time) + { + this.OriginCoordinateSystem = originCoordinateSystem; + var coordinateDifference = destinationCoordinateSystem.TransformBy(originCoordinateSystem.Invert()); + var timeInSeconds = time.TotalSeconds; + this.LinearVector = coordinateDifference.Origin.ToVector3D().ScaleBy(1.0 / timeInSeconds); + var axisAngleDistance = Vector3D.OfVector(MatrixToAxisAngle(coordinateDifference.GetRotationSubMatrix())); + var angularSpeed = axisAngleDistance.Length / timeInSeconds; + this.AxisAngleVector = angularSpeed == 0 ? default : axisAngleDistance.Normalize().ScaleBy(angularSpeed); + } + + /// + /// Returns a value indicating whether the specified velocities are the same. + /// + /// The first velocity. + /// The second velocity. + /// True if the velocities are the same; otherwise false. + public static bool operator ==(CoordinateSystemVelocity3D left, CoordinateSystemVelocity3D right) => left.Equals(right); + + /// + /// Returns a value indicating whether the specified velocities are different. + /// + /// The first velocity. + /// The second velocity. + /// True if the velocities are different; otherwise false. + public static bool operator !=(CoordinateSystemVelocity3D left, CoordinateSystemVelocity3D right) => !left.Equals(right); + + /// + public bool Equals(CoordinateSystemVelocity3D other) => this.OriginCoordinateSystem.Equals(other.OriginCoordinateSystem) && this.AxisAngleVector == other.AxisAngleVector && this.LinearVector == other.LinearVector; + + /// + public override bool Equals(object obj) => obj is CoordinateSystemVelocity3D other && this.Equals(other); + + /// + public override int GetHashCode() => HashCode.Combine(this.OriginCoordinateSystem, this.AxisAngleVector, this.LinearVector); + + /// + /// Get the linear velocity component of this coordinate system velocity. + /// + /// The linear velocity. + public LinearVelocity3D GetLinearVelocity() => new (this.OriginCoordinateSystem.Origin, this.LinearVector); + + /// + /// Get the angular velocity component of this coordinate system velocity. + /// + /// The angular velocity. + public AngularVelocity3D GetAngularVelocity() => new (this.OriginCoordinateSystem.GetRotationSubMatrix(), this.AxisAngleVector); + + /// + /// Computes the destination coordinate system, if this velocity is followed for a given amount of time. + /// + /// The span of time to compute over. + /// The destination coordinate system. + /// The unit of time should be the same as assumed for the linear and angular velocity vector (e.g., seconds). 
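// Usage sketch (illustrative only; assumes poses are metric and time is in seconds):
var startPose = new CoordinateSystem();
var endPose = new CoordinateSystem(new Point3D(1, 0, 0), UnitVector3D.XAxis, UnitVector3D.YAxis, UnitVector3D.ZAxis);
var velocity = new CoordinateSystemVelocity3D(startPose, endPose, TimeSpan.FromSeconds(2));
var linearSpeed = velocity.GetLinearVelocity().Speed; // 0.5 meters per second
var afterTwoSeconds = velocity.ComputeDestination(2); // recovers endPose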
+ public CoordinateSystem ComputeDestination(double time) + { + var destinationPoint = this.GetLinearVelocity().ComputeDestination(time); + var destinationRotation = this.GetAngularVelocity().ComputeDestination(time); + return new CoordinateSystem(destinationPoint, UnitVector3D.XAxis, UnitVector3D.YAxis, UnitVector3D.ZAxis).SetRotationSubMatrix(destinationRotation); + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/DepthImageRectangle3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/DepthImageRectangle3D.cs new file mode 100644 index 000000000..3ed994836 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/DepthImageRectangle3D.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi; + using Microsoft.Psi.Imaging; + + /// + /// Represents a depth image positioned in a 2D rectangle embedded in 3D space. + /// + public class DepthImageRectangle3D : IDisposable + { + /// + /// Initializes a new instance of the class. + /// + /// The rectangle in 3D space to contain the depth image. + /// The depth image. + public DepthImageRectangle3D(Rectangle3D rectangle, Shared depthImage) + { + this.Rectangle3D = rectangle; + this.DepthImage = depthImage.AddRef(); + } + + /// + /// Initializes a new instance of the class. + /// + /// The origin of the depth image rectangle. + /// The horizontal width axis of the depth image rectangle. + /// The vertical height axis of the depth image rectangle. + /// The left edge of the depth image rectangle (relative to origin along the width axis). + /// The bottom edge of the depth image rectangle (relative to origin along the height axis). + /// The width of the depth image rectangle. + /// The height of the depth image rectangle. + /// The depth image. + /// + /// The edges of the depth image rectangle are aligned to the specified width and height axes. + /// + public DepthImageRectangle3D( + Point3D origin, + UnitVector3D widthAxis, + UnitVector3D heightAxis, + double left, + double bottom, + double width, + double height, + Shared depthImage) + : this(new Rectangle3D(origin, widthAxis, heightAxis, left, bottom, width, height), depthImage) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The scale to use when calculating metric corner offsets from depth image pixel width and height. + /// The origin of the depth image rectangle. + /// The horizontal width axis of the depth image rectangle. + /// The vertical height axis of the depth image rectangle. + /// The depth image. + /// + /// The (left, bottom) corner of the depth image rectangle is set to the origin (0, 0), and (width, height) are calculated from multiplying + /// the depth image pixel width and height respectively by a scaling parameter. + /// The edges of the depth image rectangle are aligned to the specified width and height axes. + /// + public DepthImageRectangle3D(double scale, Point3D origin, UnitVector3D widthAxis, UnitVector3D heightAxis, Shared depthImage) + : this(new Rectangle3D(origin, widthAxis, heightAxis, 0, 0, depthImage.Resource.Width * scale, depthImage.Resource.Height * scale), depthImage) + { + } + + /// + /// Gets the rectangle. + /// + public Rectangle3D Rectangle3D { get; } + + /// + /// Gets the depth image. 
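// Usage sketch (illustrative only): "sharedDepthImage" is a hypothetical Shared<DepthImage>,
// e.g. produced by a depth camera component; the rectangle below lies in the y-z plane at x = 0.
using var depthRectangle = new DepthImageRectangle3D(
    new Point3D(0, 0, 0), UnitVector3D.YAxis, UnitVector3D.ZAxis,
    left: 0, bottom: 0, width: 0.4, height: 0.3,
    depthImage: sharedDepthImage);
if (depthRectangle.TryGetPixel(new Point3D(0, 0.2, 0.15), out ushort? depth))
{
    // depth holds the raw depth reading at the pixel nearest the projected point
}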
+ /// + public Shared DepthImage { get; } + + /// + /// Tries to get the nearest pixel value, first projecting the input point into the plane of the 3D rectangle + /// to determine image space pixel coordinates. + /// + /// The desired point to project into the depth image rectangle and get a pixel value for. + /// Pixel value (output). + /// True if the point could be projected within the bounds of the depth image, false otherwise. + public bool TryGetPixel(Point3D point, out ushort? pixelValue) + { + if (this.TryGetPixelCoordinates(point, out int u, out int v)) + { + pixelValue = this.DepthImage.Resource.GetPixel(u, v); + return true; + } + else + { + pixelValue = null; + return false; + } + } + + /// + /// Tries to set the nearest pixel to a given value, first projecting the input point into the plane of the 3D rectangle + /// to determine image space pixel coordinates. + /// + /// The desired point to project into the depth image rectangle and set a pixel value for. + /// Value to set pixel to. + /// True if the point could be projected within the bounds of the depth image, false otherwise. + public bool TrySetPixel(Point3D point, ushort pixelValue) + { + if (this.TryGetPixelCoordinates(point, out int u, out int v)) + { + this.DepthImage.Resource.SetPixel(u, v, pixelValue); + return true; + } + else + { + return false; + } + } + + /// + public void Dispose() + { + this.DepthImage?.Dispose(); + } + + /// + /// Get the pixel coordinates that map to a given 3d point location. + /// + /// The 3d point in "global" coordinates. + /// The pixel u coordinate (output). + /// The pixel v coordinate (output). + /// The maximum allowed distance for projecting the point to the rectangular image plane. + /// True if the point could be projected within the bounds of the depth image rectangle, false otherwise. + public bool TryGetPixelCoordinates(Point3D point, out int u, out int v, double maxPlaneDistance = double.MaxValue) + { + // Project the given point to the corresponding plane. + var planeProjectedPoint = point.ProjectOn(Plane.FromPoints(this.Rectangle3D.TopLeft, this.Rectangle3D.TopRight, this.Rectangle3D.BottomRight)); + + // Check if the projected point is too far away from the original point. 
+ if ((planeProjectedPoint - point).Length > maxPlaneDistance) + { + u = v = -1; + return false; + } + + // Construct a width axis pointing left-to-right and a height axis pointing top-to-bottom, + var widthVector = this.Rectangle3D.TopRight - this.Rectangle3D.TopLeft; + var heightVector = this.Rectangle3D.BottomLeft - this.Rectangle3D.TopLeft; + + // Compute the normalized projection to the width and height vectors of the rectangle + var cornerToPoint = planeProjectedPoint - this.Rectangle3D.TopLeft; + var widthVectorProjection = cornerToPoint.DotProduct(widthVector) / widthVector.DotProduct(widthVector); + var heightVectorProjection = cornerToPoint.DotProduct(heightVector) / heightVector.DotProduct(heightVector); + + // Convert to pixel coordinates + u = (int)(widthVectorProjection * this.DepthImage.Resource.Width); + v = (int)(heightVectorProjection * this.DepthImage.Resource.Height); + + if (u >= 0 && v >= 0 && u < this.DepthImage.Resource.Width && v < this.DepthImage.Resource.Height) + { + return true; + } + else + { + u = v = -1; + return false; + } + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/EncodedDepthImageRectangle3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/EncodedDepthImageRectangle3D.cs new file mode 100644 index 000000000..9a8274783 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/EncodedDepthImageRectangle3D.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi; + using Microsoft.Psi.Imaging; + + /// + /// Represents an encoded depth image positioned in a 2D rectangle embedded in 3D space. + /// + public class EncodedDepthImageRectangle3D : IDisposable + { + /// + /// Initializes a new instance of the class. + /// + /// The rectangle in 3D space to contain the encoded depth image. + /// The encoded depth image. + public EncodedDepthImageRectangle3D(Rectangle3D rectangle, Shared depthImage) + { + this.Rectangle3D = rectangle; + this.DepthImage = depthImage.AddRef(); + } + + /// + /// Initializes a new instance of the class. + /// + /// The origin of the encoded depth image rectangle. + /// The horizontal width axis of the encoded depth image rectangle. + /// The vertical height axis of the encoded depth image rectangle. + /// The left edge of the encoded depth image rectangle (relative to origin along the width axis). + /// The bottom edge of the encoded depth image rectangle (relative to origin along the height axis). + /// The width of the encoded depth image rectangle. + /// The height of the encoded depth image rectangle. + /// The encoded depth image. + /// + /// The edges of the encoded depth image rectangle are aligned to the specified width and height axes. + /// + public EncodedDepthImageRectangle3D( + Point3D origin, + UnitVector3D widthAxis, + UnitVector3D heightAxis, + double left, + double bottom, + double width, + double height, + Shared depthImage) + : this(new Rectangle3D(origin, widthAxis, heightAxis, left, bottom, width, height), depthImage) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The scale to use when calculating metric corner offsets from depth image pixel width and height. + /// The origin of the encoded depth image rectangle. + /// The horizontal width axis of the encoded depth image rectangle. 
+ /// The vertical height axis of the encoded depth image rectangle. + /// The encoded depth image. + /// + /// The (left, bottom) corner of the encoded depth image rectangle is set to the origin (0, 0), and (width, height) are calculated from multiplying + /// the encoded depth image pixel width and height respectively by a scaling parameter. + /// The edges of the encoded depth image rectangle are aligned to the specified width and height axes. + /// + public EncodedDepthImageRectangle3D(double scale, Point3D origin, UnitVector3D widthAxis, UnitVector3D heightAxis, Shared depthImage) + : this(new Rectangle3D(origin, widthAxis, heightAxis, 0, 0, depthImage.Resource.Width * scale, depthImage.Resource.Height * scale), depthImage) + { + } + + /// + /// Gets the rectangle. + /// + public Rectangle3D Rectangle3D { get; } + + /// + /// Gets the encoded depth image. + /// + public Shared DepthImage { get; } + + /// + public void Dispose() + { + this.DepthImage?.Dispose(); + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/EncodedImageRectangle3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/EncodedImageRectangle3D.cs new file mode 100644 index 000000000..de7f45692 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/EncodedImageRectangle3D.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi; + using Microsoft.Psi.Imaging; + + /// + /// Represents an encoded image positioned in a 2D rectangle embedded in 3D space. + /// + public class EncodedImageRectangle3D : IDisposable + { + /// + /// Initializes a new instance of the class. + /// + /// The rectangle in 3D space to contain the encoded image. + /// The encoded image. + public EncodedImageRectangle3D(Rectangle3D rectangle, Shared image) + { + this.Rectangle3D = rectangle; + this.Image = image.AddRef(); + } + + /// + /// Initializes a new instance of the class. + /// + /// The origin of the encoded image rectangle. + /// The horizontal width axis of the encoded image rectangle. + /// The vertical height axis of the encoded image rectangle. + /// The left edge of the encoded image rectangle (relative to origin along the width axis). + /// The bottom edge of the encoded image rectangle (relative to origin along the height axis). + /// The width of the encoded image rectangle. + /// The height of the encoded image rectangle. + /// The encoded image. + /// + /// The edges of the encoded image rectangle are aligned to the specified width and height axes. + /// + public EncodedImageRectangle3D( + Point3D origin, + UnitVector3D widthAxis, + UnitVector3D heightAxis, + double left, + double bottom, + double width, + double height, + Shared image) + : this(new Rectangle3D(origin, widthAxis, heightAxis, left, bottom, width, height), image) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The scale to use when calculating metric corner offsets from image pixel width and height. + /// The origin of the encoded image rectangle. + /// The horizontal width axis of the encoded image rectangle. + /// The vertical height axis of the encoded image rectangle. + /// The encoded image. 
+ /// + /// The (left, bottom) corner of the encoded image rectangle is set to the origin (0, 0), and (width, height) are calculated from multiplying + /// the encoded image pixel width and height respectively by a scaling parameter. + /// The edges of the encoded image rectangle are aligned to the specified width and height axes. + /// + public EncodedImageRectangle3D(double scale, Point3D origin, UnitVector3D widthAxis, UnitVector3D heightAxis, Shared image) + : this(new Rectangle3D(origin, widthAxis, heightAxis, 0, 0, image.Resource.Width * scale, image.Resource.Height * scale), image) + { + } + + /// + /// Gets the rectangle. + /// + public Rectangle3D Rectangle3D { get; } + + /// + /// Gets the encoded image. + /// + public Shared Image { get; } + + /// + public void Dispose() + { + this.Image?.Dispose(); + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/ImageRectangle3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/ImageRectangle3D.cs new file mode 100644 index 000000000..2e1dca48e --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/ImageRectangle3D.cs @@ -0,0 +1,199 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi; + using Microsoft.Psi.Imaging; + + /// + /// Represents an image positioned in a 2D rectangle embedded in 3D space. + /// + public class ImageRectangle3D : IDisposable + { + /// + /// Initializes a new instance of the class. + /// + /// The rectangle in 3D space to contain the image. + /// The image. + public ImageRectangle3D(Rectangle3D rectangle, Shared image) + { + this.Rectangle3D = rectangle; + this.Image = image.AddRef(); + } + + /// + /// Initializes a new instance of the class. + /// + /// The origin of the image rectangle. + /// The horizontal width axis of the image rectangle. + /// The vertical height axis of the image rectangle. + /// The left edge of the image rectangle (relative to origin along the width axis). + /// The bottom edge of the image rectangle (relative to origin along the height axis). + /// The width of the image rectangle. + /// The height of the image rectangle. + /// The image. + /// + /// The edges of the image rectangle are aligned to the specified width and height axes. + /// + public ImageRectangle3D( + Point3D origin, + UnitVector3D widthAxis, + UnitVector3D heightAxis, + double left, + double bottom, + double width, + double height, + Shared image) + : this(new Rectangle3D(origin, widthAxis, heightAxis, left, bottom, width, height), image) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The scale to use when calculating metric corner offsets from image pixel width and height. + /// The origin of the image rectangle. + /// The horizontal width axis of the image rectangle. + /// The vertical height axis of the image rectangle. + /// The image. + /// + /// The (left, bottom) corner of the image rectangle is set to the origin (0, 0), and (width, height) are calculated + /// from multiplying the image pixel width and height respectively by a scaling parameter. + /// The edges of the image rectangle are aligned to the specified width and height axes. 
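// Usage sketch (illustrative only): "sharedImage" is a hypothetical Shared<Image>; with a scale of
// 0.001, each pixel spans one millimeter along the rectangle's width and height axes.
using var imageRectangle = new ImageRectangle3D(0.001, new Point3D(0, 0, 0), UnitVector3D.YAxis, UnitVector3D.ZAxis, sharedImage);
if (imageRectangle.TryGetPixel(new Point3D(0, 0.05, 0.05), out int r, out int g, out int b, out int a))
{
    // (r, g, b, a) holds the color of the pixel nearest the projected point
}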
+ /// + public ImageRectangle3D(double scale, Point3D origin, UnitVector3D widthAxis, UnitVector3D heightAxis, Shared image) + : this(new Rectangle3D(origin, widthAxis, heightAxis, 0, 0, image.Resource.Width * scale, image.Resource.Height * scale), image) + { + } + + /// + /// Gets the rectangle. + /// + public Rectangle3D Rectangle3D { get; } + + /// + /// Gets the image. + /// + public Shared Image { get; } + + /// + /// Tries to get the nearest pixel value, first projecting the input point into the plane of the 3D rectangle + /// to determine image space pixel coordinates. + /// + /// The desired point to project into the image rectangle and get a pixel value for. + /// Red channel's value (output). + /// Green channel's value (output). + /// Blue channel's value (output). + /// Alpha channel's value (output). + /// True if the point could be projected within the bounds of the image, false otherwise. + public bool TryGetPixel(Point3D point, out int r, out int g, out int b, out int a) + { + if (this.TryGetPixelCoordinates(point, out int u, out int v)) + { + (r, g, b, a) = this.Image.Resource.GetPixel(u, v); + return true; + } + else + { + r = g = b = a = -1; + return false; + } + } + + /// + /// Tries to set the nearest pixel to a given value, first projecting the input point into the plane of the 3D rectangle + /// to determine image space pixel coordinates. + /// + /// The desired point to project into the image rectangle and set a pixel value for. + /// Red channel's value. + /// Green channel's value. + /// Blue channel's value. + /// Alpha channel's value. + /// True if the point could be projected within the bounds of the image, false otherwise. + public bool TrySetPixel(Point3D point, int r, int g, int b, int a) + { + if (this.TryGetPixelCoordinates(point, out int u, out int v)) + { + this.Image.Resource.SetPixel(u, v, r, g, b, a); + return true; + } + else + { + return false; + } + } + + /// + /// Tries to set the nearest pixel to a given gray value, first projecting the input point into the plane of the 3D rectangle + /// to determine image space pixel coordinates. + /// + /// The desired point to project into the image rectangle and set a pixel value for. + /// Gray value to set pixel to. + /// True if the point could be projected within the bounds of the image, false otherwise. + public bool TrySetPixel(Point3D point, int gray) + { + if (this.TryGetPixelCoordinates(point, out int u, out int v)) + { + this.Image.Resource.SetPixel(u, v, gray); + return true; + } + else + { + return false; + } + } + + /// + public void Dispose() + { + this.Image?.Dispose(); + } + + /// + /// Get the pixel coordinates that map to a given 3d point location. + /// + /// The 3d point in "global" coordinates. + /// The pixel u coordinate (output). + /// The pixel v coordinate (output). + /// The maximum allowed distance for projecting the point to the rectangular image plane. + /// True if the point could be projected within the bounds of the image rectangle, false otherwise. + public bool TryGetPixelCoordinates(Point3D point, out int u, out int v, double maxPlaneDistance = double.MaxValue) + { + // Project the given point to the corresponding plane. + var planeProjectedPoint = point.ProjectOn(Plane.FromPoints(this.Rectangle3D.TopLeft, this.Rectangle3D.TopRight, this.Rectangle3D.BottomRight)); + + // Check if the projected point is too far away from the original point. 
+ if ((planeProjectedPoint - point).Length > maxPlaneDistance) + { + u = v = -1; + return false; + } + + // Construct a width axis pointing left-to-right and a height axis pointing top-to-bottom, + var widthVector = this.Rectangle3D.TopRight - this.Rectangle3D.TopLeft; + var heightVector = this.Rectangle3D.BottomLeft - this.Rectangle3D.TopLeft; + + // Compute the normalized projection to the width and height vectors of the rectangle + var cornerToPoint = planeProjectedPoint - this.Rectangle3D.TopLeft; + var widthVectorProjection = cornerToPoint.DotProduct(widthVector) / widthVector.DotProduct(widthVector); + var heightVectorProjection = cornerToPoint.DotProduct(heightVector) / heightVector.DotProduct(heightVector); + + // Convert to pixel coordinates + u = (int)(widthVectorProjection * this.Image.Resource.Width); + v = (int)(heightVectorProjection * this.Image.Resource.Height); + + if (u >= 0 && v >= 0 && u < this.Image.Resource.Width && v < this.Image.Resource.Height) + { + return true; + } + else + { + u = v = -1; + return false; + } + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/Operators.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/Operators.cs new file mode 100644 index 000000000..b76a5598e --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Images/Operators.cs @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using Microsoft.Psi; + using Microsoft.Psi.Imaging; + + /// + /// Implements operators for processing spatial image types. + /// + public static partial class Operators + { + /// + /// Encodes an image rectangle using a specified image encoder. + /// + /// The source stream of image rectangles. + /// The image encoder to use. + /// An optional delivery policy. + /// A stream of encoded image rectangles. + public static IProducer Encode( + this IProducer source, + IImageToStreamEncoder encoder, + DeliveryPolicy deliveryPolicy = null) + { + return source.Process( + (imageRectangle, envelope, emitter) => + { + var image = imageRectangle.Image.Resource; + using var encodedImage = EncodedImagePool.GetOrCreate(image.Width, image.Height, image.PixelFormat); + encodedImage.Resource.EncodeFrom(image, encoder); + emitter.Post(new EncodedImageRectangle3D(imageRectangle.Rectangle3D, encodedImage), envelope.OriginatingTime); + }, + deliveryPolicy); + } + + /// + /// Encodes a depth image rectangle using a specified depth image encoder. + /// + /// The source stream of depth image rectangles. + /// The depth image encoder to use. + /// An optional delivery policy. + /// A stream of encoded depth image rectangles. + public static IProducer Encode( + this IProducer source, + IDepthImageToStreamEncoder encoder, + DeliveryPolicy deliveryPolicy = null) + { + return source.Process( + (depthImageRectangle, envelope, emitter) => + { + var depthImage = depthImageRectangle.DepthImage.Resource; + using var encodedDepthImage = EncodedDepthImagePool.GetOrCreate(depthImage.Width, depthImage.Height); + encodedDepthImage.Resource.EncodeFrom(depthImage, encoder); + emitter.Post(new EncodedDepthImageRectangle3D(depthImageRectangle.Rectangle3D, encodedDepthImage), envelope.OriginatingTime); + }, + deliveryPolicy); + } + + /// + /// Decodes an encoded image image rectangle using a specified image decoder. + /// + /// The source stream of encoded image rectangles. + /// The image decoder to use. + /// An optional delivery policy. 
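// Usage sketch (illustrative only): "imageRectangles" is a hypothetical IProducer<ImageRectangle3D>
// stream; the encoder and decoder are assumed to be the JPEG encoder and image decoder available
// in Microsoft.Psi.Imaging on Windows.
var encoded = imageRectangles.Encode(new ImageToJpegStreamEncoder(), DeliveryPolicy.LatestMessage);
var decoded = encoded.Decode(new ImageFromStreamDecoder());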
+ /// A stream of decoded image rectangles. + public static IProducer Decode( + this IProducer source, + IImageFromStreamDecoder decoder, + DeliveryPolicy deliveryPolicy = null) + { + return source.Process( + (encodedImageRectangle, envelope, emitter) => + { + var encodedImage = encodedImageRectangle.Image.Resource; + using var image = ImagePool.GetOrCreate(encodedImage.Width, encodedImage.Height, encodedImage.PixelFormat); + image.Resource.DecodeFrom(encodedImage, decoder); + emitter.Post(new ImageRectangle3D(encodedImageRectangle.Rectangle3D, image), envelope.OriginatingTime); + }, + deliveryPolicy); + } + + /// + /// Decodes an encoded depth image image rectangle using a specified depth image decoder. + /// + /// The source stream of encoded depth image rectangles. + /// The depth image decoder to use. + /// An optional delivery policy. + /// A stream of decoded depth image rectangles. + public static IProducer Decode( + this IProducer source, + IDepthImageFromStreamDecoder decoder, + DeliveryPolicy deliveryPolicy = null) + { + return source.Process( + (encodedDepthImageRectangle, envelope, emitter) => + { + var encodedDepthImage = encodedDepthImageRectangle.DepthImage.Resource; + using var depthImage = DepthImagePool.GetOrCreate(encodedDepthImage.Width, encodedDepthImage.Height); + depthImage.Resource.DecodeFrom(encodedDepthImage, decoder); + emitter.Post(new DepthImageRectangle3D(encodedDepthImageRectangle.Rectangle3D, depthImage), envelope.OriginatingTime); + }, + deliveryPolicy); + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/LinearVelocity3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/LinearVelocity3D.cs new file mode 100644 index 000000000..6d64f9fe1 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/LinearVelocity3D.cs @@ -0,0 +1,101 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System; + using MathNet.Spatial.Euclidean; + + /// + /// Represents a linear 3D velocity rooted at a point in space. + /// + public readonly struct LinearVelocity3D : IEquatable + { + /// + /// The point of origin. + /// + public readonly Point3D Origin; + + /// + /// The velocity vector. Describes the direction of motion as well as the speed (length of the vector). + /// + public readonly Vector3D Vector; + + /// + /// Initializes a new instance of the struct. + /// + /// The origin point. + /// The velocity vector. + public LinearVelocity3D(Point3D origin, Vector3D vector) + { + this.Origin = origin; + this.Vector = vector; + } + + /// + /// Initializes a new instance of the struct. + /// + /// The origin of the velocity. + /// The unit vector indicating the direction of velocity. + /// The speed in the specified direction. + public LinearVelocity3D(Point3D origin, UnitVector3D unitVector, double speed) + : this(origin, unitVector.ScaleBy(speed)) + { + } + + /// + /// Initializes a new instance of the struct. + /// + /// The origin of the velocity. + /// A destination point. + /// The time it took to reach that destination point. + public LinearVelocity3D(Point3D origin, Point3D destinationPoint, TimeSpan time) + { + this.Origin = origin; + var directionVector = destinationPoint - origin; + var speed = directionVector.Length / time.TotalSeconds; + this.Vector = directionVector.Normalize().ScaleBy(speed); + } + + /// + /// Gets the magnitude of the velocity. 
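// Usage sketch (illustrative only; time in seconds): a point that moved 2 units along x in 4 seconds.
var linearVelocity = new LinearVelocity3D(new Point3D(0, 0, 0), new Point3D(2, 0, 0), TimeSpan.FromSeconds(4));
var speed = linearVelocity.Speed;                            // 0.5
var afterTenSeconds = linearVelocity.ComputeDestination(10); // (5, 0, 0)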
+ /// + public double Speed => this.Vector.Length; + + /// + /// Returns a value indicating whether the specified velocities are the same. + /// + /// The first velocity. + /// The second velocity. + /// True if the velocities are the same; otherwise false. + public static bool operator ==(LinearVelocity3D left, LinearVelocity3D right) => left.Equals(right); + + /// + /// Returns a value indicating whether the specified velocities are different. + /// + /// The first velocity. + /// The second velocity. + /// True if the velocities are different; otherwise false. + public static bool operator !=(LinearVelocity3D left, LinearVelocity3D right) => !left.Equals(right); + + /// + public bool Equals(LinearVelocity3D other) => this.Origin == other.Origin && this.Vector == other.Vector; + + /// + public override bool Equals(object obj) => obj is LinearVelocity3D other && this.Equals(other); + + /// + public override int GetHashCode() => HashCode.Combine(this.Origin, this.Vector); + + /// + /// Computes the destination point, if this velocity is followed for a given amount of time. + /// + /// The span of time to compute over. + /// The destination point. + /// The unit of time should be the same as assumed for the velocity vector (e.g., seconds). + public Point3D ComputeDestination(double time) + { + return this.Origin + this.Vector.ScaleBy(time); + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Mesh3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Mesh3D.cs new file mode 100644 index 000000000..fe4ef1316 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Mesh3D.cs @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using MathNet.Spatial.Euclidean; + + /// + /// Represents a 3-dimensional surface mesh. + /// + public class Mesh3D + { + /// + /// Initializes a new instance of the class. + /// + /// Vertex points. + /// Triangle indices. + public Mesh3D(Point3D[] vertices, uint[] triangleIndices) + { + this.Vertices = vertices; + this.TriangleIndices = triangleIndices; + } + + /// + /// Gets mesh vertex points. + /// + public Point3D[] Vertices { get; private set; } + + /// + /// Gets mesh triangle indices. + /// + public uint[] TriangleIndices { get; private set; } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Microsoft.Psi.Spatial.Euclidean.csproj b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Microsoft.Psi.Spatial.Euclidean.csproj new file mode 100644 index 000000000..3a3a95de0 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Microsoft.Psi.Spatial.Euclidean.csproj @@ -0,0 +1,46 @@ + + + + netstandard2.0 + Provides types and methods for representing various 3D shapes and spatial operations. This project extends what is already provided in MathNet.Spatial.Euclidean. 
+ true + + + + ..\..\..\Build\Microsoft.Psi.ruleset + bin\Debug\netstandard2.0\Microsoft.Psi.Spatial.Euclidean.xml + true + true + + + + + ..\..\..\Build\Microsoft.Psi.ruleset + bin\Release\netstandard2.0\Microsoft.Psi.Spatial.Euclidean.xml + true + true + + + + + + + + + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + + diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Operators.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Operators.cs new file mode 100644 index 000000000..a8f334380 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Operators.cs @@ -0,0 +1,116 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System; + using MathNet.Numerics.LinearAlgebra; + using MathNet.Spatial.Euclidean; + + /// + /// Implements various operators for manipulating spatial euclidean entities. + /// + public static partial class Operators + { + /// + /// Transforms a point cloud by a coordinate system. + /// + /// The coordinate system. + /// The point cloud. + /// The transformed point cloud. + public static PointCloud3D Transform(this CoordinateSystem coordinateSystem, PointCloud3D pointCloud3D) + => pointCloud3D.TransformBy(coordinateSystem); + + /// + /// Transforms a rectangle by a coordinate system. + /// + /// The coordinate system. + /// The rectangle. + /// The transformed rectangle. + public static Rectangle3D Transform(this CoordinateSystem coordinateSystem, Rectangle3D rectangle3D) + { + var newBottomLeft = rectangle3D.BottomLeft.TransformBy(coordinateSystem); + var newBottomRight = rectangle3D.BottomRight.TransformBy(coordinateSystem); + var newTopLeft = rectangle3D.TopLeft.TransformBy(coordinateSystem); + var widthAxis = newBottomRight - newBottomLeft; + var heightAxis = newTopLeft - newBottomLeft; + var width = widthAxis.Length; + var height = heightAxis.Length; + return new Rectangle3D(newBottomLeft, widthAxis.Normalize(), heightAxis.Normalize(), 0, 0, width, height); + } + + /// + /// Transforms a box by a coordinate system. + /// + /// The coordinate system. + /// The box. + /// The transformed box. + public static Box3D Transform(this CoordinateSystem coordinateSystem, Box3D box3D) + => box3D.TransformBy(coordinateSystem); + + /// + /// Computes the linear velocity of a coordinate system. + /// + /// The source stream of coordinate systems. + /// An optional delivery policy parameter. + /// A stream containing the linear velocity of the specified point. + public static IProducer GetLinearVelocity3D(this IProducer source, DeliveryPolicy deliveryPolicy = null) => + source.GetLinearVelocity3D(cs => cs?.Origin, deliveryPolicy); + + /// + /// Computes the linear velocity of an object. + /// + /// The type of the object. + /// The source stream of points. + /// A function that specifies the location of the object. + /// An optional delivery policy parameter. + /// A stream containing the linear velocity of the specified point. 
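// Usage sketch (illustrative only): "headPose" is a hypothetical IProducer<CoordinateSystem> stream,
// e.g. a tracked head pose; the resulting stream carries LinearVelocity3D values for its origin.
var headVelocity = headPose.GetLinearVelocity3D();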
+ public static IProducer GetLinearVelocity3D(this IProducer source, Func getLocation, DeliveryPolicy deliveryPolicy = null) + { + var lastPoint3D = default(Point3D?); + var lastDateTime = DateTime.MinValue; + return source.Process( + (t, envelope, emitter) => + { + var point3D = getLocation(t); + if (point3D.HasValue && lastPoint3D.HasValue && lastDateTime > DateTime.MinValue) + { + var velocity3D = new LinearVelocity3D(point3D.Value, (point3D.Value - lastPoint3D.Value).Normalize(), point3D.Value.DistanceTo(lastPoint3D.Value) / (envelope.OriginatingTime - lastDateTime).TotalSeconds); + emitter.Post(velocity3D, envelope.OriginatingTime); + } + + lastPoint3D = point3D; + lastDateTime = envelope.OriginatingTime; + }, + deliveryPolicy); + } + + /// + /// Gets a rotation matrix corresponding to a forward vector. + /// + /// The specified forward vector. + /// The corresponding rotation matrix. + /// The X axis of the matrix will correspond to the specified forward vector. + public static Matrix ToRotationMatrix(this Vector3D forward) => forward.Normalize().ToRotationMatrix(); + + /// + /// Gets a rotation matrix corresponding to a forward vector. + /// + /// The specified forward vector. + /// The corresponding rotation matrix. + /// The X axis of the matrix will correspond to the specified forward vector. + public static Matrix ToRotationMatrix(this UnitVector3D forward) + { + // Compute left and up directions from the given forward direction. + var left = UnitVector3D.ZAxis.CrossProduct(forward); + var up = forward.CrossProduct(left); + + // Create a corresponding 3x3 matrix from the 3 directions. + var rotationMatrix = Matrix.Build.Dense(3, 3); + rotationMatrix.SetColumn(0, forward.ToVector()); + rotationMatrix.SetColumn(1, left.ToVector()); + rotationMatrix.SetColumn(2, up.ToVector()); + return rotationMatrix; + } + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/PointCloud3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/PointCloud3D.cs new file mode 100644 index 000000000..505d91de7 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/PointCloud3D.cs @@ -0,0 +1,364 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System.Collections; + using System.Collections.Generic; + using System.Linq; + using MathNet.Numerics.LinearAlgebra; + using MathNet.Spatial.Euclidean; + using Microsoft.Psi.Calibration; + using Microsoft.Psi.Imaging; + + /// + /// Represents a point cloud in 3D space. + /// + public class PointCloud3D : IEnumerable + { + /// + /// Gets the empty point cloud. + /// + public static readonly PointCloud3D Empty = new (); + + private readonly Matrix points = null; + + /// + /// Initializes a new instance of the class. + /// + /// The set of points. + public PointCloud3D(IEnumerable points) + { + var count = points.Count(); + if (count == 0) + { + return; + } + + this.points = Matrix.Build.Dense(4, count); + int i = 0; + foreach (var point in points) + { + this.points[0, i] = point.X; + this.points[1, i] = point.Y; + this.points[2, i] = point.Z; + this.points[3, i] = 1; + i++; + } + } + + /// + /// Initializes a new instance of the class. + /// + /// This private constructor creates an empty point cloud. + private PointCloud3D() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The set of points expressed in a count x 4 matrix. 
+ private PointCloud3D(Matrix points) + { + this.points = points; + } + + /// + /// Gets a value indicating whether the point cloud is empty. + /// + public bool IsEmpty => this.points == null; + + /// + /// Gets the number of points in the point cloud. + /// + public int NumberOfPoints => this.points != null ? this.points.ColumnCount : 0; + + /// + /// Create a point cloud from a shared depth image. + /// + /// The depth image. + /// The depth camera intrinsics. + /// An optional parameter representing a scale factor to apply to the depth values. + /// An optional parameter to specify how sparsely to sample pixels (by default 1). + /// An optional parameter that specifies whether to undistort when projecting through the intrinsics. + /// An optional parameter that indicates to return only robust points (where the nearby depth estimates are not zero). + /// The corresponding point cloud. + public static PointCloud3D FromDepthImage(Shared depthImage, ICameraIntrinsics depthCameraIntrinsics, double scaleFactor = 1, int sparsity = 1, bool undistort = true, bool robustPointsOnly = false) + => FromDepthImage(depthImage?.Resource, depthCameraIntrinsics, scaleFactor, sparsity, undistort, robustPointsOnly); + + /// + /// Create a point cloud from a shared depth image. + /// + /// The depth image. + /// A camera space mapping matrix. + /// An optional parameter representing a scale factor to apply to the depth values (by default 1). + /// An optional parameter to specify how sparsely to sample pixels (by default 1). + /// An optional parameter that indicates to return only robust points (where the nearby depth estimates are not zero). + /// The corresponding point cloud. + public static PointCloud3D FromDepthImage(Shared depthImage, Point3D[,] cameraSpaceMapping, double scalingFactor = 1, int sparsity = 1, bool robustPointsOnly = false) + => FromDepthImage(depthImage?.Resource, cameraSpaceMapping, scalingFactor, sparsity, robustPointsOnly); + + /// + /// Create a point cloud from a depth image. + /// + /// The depth image. + /// The depth camera intrinsics. + /// An optional parameter representing a scale factor to apply to the depth values (by default 1). + /// An optional parameter to specify how sparsely to sample pixels (by default 1). + /// An optional parameter that specifies whether to undistort when projecting through the intrinsics. + /// An optional parameter that indicates to return only robust points (where the nearby depth estimates are not zero). + /// The corresponding point cloud. + public static PointCloud3D FromDepthImage(DepthImage depthImage, ICameraIntrinsics depthCameraIntrinsics, double scalingFactor = 1, int sparsity = 1, bool undistort = true, bool robustPointsOnly = false) + => FromDepthImage(depthImage, depthCameraIntrinsics?.GetPixelToCameraSpaceMapping(undistort), scalingFactor, sparsity, robustPointsOnly); + + /// + /// Create a point cloud from a depth image. + /// + /// The depth image. + /// A camera space mapping matrix. + /// An optional parameter representing a scale factor to apply to the depth values (by default 1). + /// An optional parameter to specify how sparsely to sample pixels (by default 1). + /// An optional parameter that indicates to return only robust points (where the nearby depth estimates are not zero). + /// The corresponding point cloud. 
+ public static PointCloud3D FromDepthImage(DepthImage depthImage, Point3D[,] cameraSpaceMapping, double scalingFactor = 1, int sparsity = 1, bool robustPointsOnly = false) + { + if (depthImage == null || cameraSpaceMapping == null) + { + return Empty; + } + + unsafe + { + // First count how many non-zero depth points are there in the image region + int count = 0; + ushort* depthFrame = (ushort*)depthImage.ImageData.ToPointer(); + for (int iy = 0; iy < depthImage.Height; iy += sparsity) + { + var previousRow = (iy - 1) * depthImage.Width; + var nextRow = (iy + 1) * depthImage.Width; + var row = iy * depthImage.Width; + + for (int ix = 0; ix < depthImage.Width; ix += sparsity) + { + if (robustPointsOnly) + { + if (iy > 0 && iy < depthImage.Height - 1 && + ix > 0 && ix < depthImage.Width - 1 && + depthFrame[previousRow + ix - 1] != 0 && + depthFrame[previousRow + ix] != 0 && + depthFrame[previousRow + ix + 1] != 0 && + depthFrame[row + ix - 1] != 0 && + depthFrame[row + ix] != 0 && + depthFrame[row + ix + 1] != 0 && + depthFrame[nextRow + ix - 1] != 0 && + depthFrame[nextRow + ix] != 0 && + depthFrame[nextRow + ix + 1] != 0) + { + count++; + } + } + else if (depthFrame[row + ix] != 0) + { + count++; + } + } + } + + if (count == 0) + { + return Empty; + } + + // Then iterate again and compute the points + var points = Matrix.Build.Dense(4, count); + int index = 0; + for (int iy = 0; iy < depthImage.Height; iy += sparsity) + { + var previousRow = (iy - 1) * depthImage.Width; + var nextRow = (iy + 1) * depthImage.Width; + var row = iy * depthImage.Width; + for (int ix = 0; ix < depthImage.Width; ix += sparsity) + { + var d = depthFrame[row + ix]; + var isPointEstimate = robustPointsOnly ? + iy > 0 && iy < depthImage.Height - 1 && + ix > 0 && ix < depthImage.Width - 1 && + depthFrame[previousRow + ix - 1] != 0 && + depthFrame[previousRow + ix] != 0 && + depthFrame[previousRow + ix + 1] != 0 && + depthFrame[row + ix - 1] != 0 && + d != 0 && + depthFrame[row + ix + 1] != 0 && + depthFrame[nextRow + ix - 1] != 0 && + depthFrame[nextRow + ix] != 0 && + depthFrame[nextRow + ix + 1] != 0 + : + d != 0; + if (isPointEstimate) + { + var dscaled = d * scalingFactor; + var cameraSpacePoint = cameraSpaceMapping[ix, iy]; + points[0, index] = dscaled * cameraSpacePoint.X; + points[1, index] = dscaled * cameraSpacePoint.Y; + points[2, index] = dscaled * cameraSpacePoint.Z; + points[3, index] = 1; + index++; + } + } + } + + return new (points); + } + } + + /// + /// Computes the distance from every point in the cloud to a specified . + /// + /// The to compute the distance to. + /// A vector containing distances from every point in the cloud to the . 
+ public Vector DistanceTo(Ray3D ray3D) + { + if (this.IsEmpty) + { + return null; + } + + // The algorithm computes distances from points to the ray using linear algebra, + // as described in https://en.wikipedia.org/wiki/Distance_from_a_point_to_a_line + var p = Matrix.Build.DenseOfRowArrays( + this.points.Row(0).ToArray(), + this.points.Row(1).ToArray(), + this.points.Row(2).ToArray()); + var a = Vector.Build.DenseOfArray(new double[] { ray3D.ThroughPoint.X, ray3D.ThroughPoint.Y, ray3D.ThroughPoint.Z }); + var n = Vector.Build.DenseOfArray(new double[] { ray3D.Direction.X, ray3D.Direction.Y, ray3D.Direction.Z }); + + var pointsMinusA = p - Matrix.Build.Dense(3, this.points.ColumnCount, (r, c) => a[r]); + + var pointsMinusADotProductN = pointsMinusA.TransposeThisAndMultiply(n); + var pointsMinusADotProductNTimesN = Matrix.Build.DenseOfRowArrays( + pointsMinusADotProductN.Multiply(n[0]).ToArray(), + pointsMinusADotProductN.Multiply(n[1]).ToArray(), + pointsMinusADotProductN.Multiply(n[2]).ToArray()); + + var final = pointsMinusA - pointsMinusADotProductNTimesN; + var distances = final.PointwiseMultiply(final).ColumnSums().PointwiseSqrt(); + + return distances; + } + + /// + /// Gets the closest point in the cloud to a specified . + /// + /// The ray to compute the closest point to. + /// The closest point in the cloud to the specified ray. + /// + /// If multiple points are at the minimum distance, the method returns the + /// first of these (in the order the points appear in the cloud). + /// + public Point3D? ClosestPointTo(Ray3D ray3D) + { + if (this.IsEmpty) + { + return null; + } + + var distances = this.DistanceTo(ray3D); + var minDistance = double.MaxValue; + var minIndex = -1; + for (int i = 0; i < distances.Count; i++) + { + if (distances[i] < minDistance) + { + minDistance = distances[i]; + minIndex = i; + } + } + + return new Point3D(this.points[0, minIndex], this.points[1, minIndex], this.points[2, minIndex]); + } + + /// + /// Gets the intersection between the point cloud and a specified 3D box. + /// + /// The specify 3D box. + /// A point cloud that falls within the specified 3D box. + public PointCloud3D IntersectionWith(Box3D box3D) + { + if (this.IsEmpty) + { + return Empty; + } + + // Get the points in box coordinates + var pointsInBox3DCoordinates = box3D.Pose.Inverse().Multiply(this.points); + + var insidePoints = new List(); + for (int i = 0; i < pointsInBox3DCoordinates.ColumnCount; i++) + { + if (pointsInBox3DCoordinates[0, i] >= box3D.Bounds.Min.X && + pointsInBox3DCoordinates[0, i] <= box3D.Bounds.Max.X && + pointsInBox3DCoordinates[1, i] >= box3D.Bounds.Min.Y && + pointsInBox3DCoordinates[1, i] <= box3D.Bounds.Max.Y && + pointsInBox3DCoordinates[2, i] >= box3D.Bounds.Min.Z && + pointsInBox3DCoordinates[2, i] <= box3D.Bounds.Max.Z) + { + insidePoints.Add(new double[] { this.points[0, i], this.points[1, i], this.points[2, i], 1 }); + } + } + + return (insidePoints.Count > 0) ? new PointCloud3D(Matrix.Build.DenseOfColumnArrays(insidePoints.ToArray())) : new (); + } + + /// + /// Transforms the point cloud by a coordinate system. + /// + /// The coordinate system to transform the point cloud by. + /// The transformed point cloud. + public PointCloud3D TransformBy(CoordinateSystem coordinateSystem) => + new (this.IsEmpty ? null : coordinateSystem.Multiply(this.points)); + + /// + /// Scales the point cloud by a specified value. + /// + /// The value to scale the point cloud by. + /// The scaled point cloud. 
+        public PointCloud3D Multiply(double scalar) =>
+            new (this.points?.Multiply(scalar));
+
+        ///
+        /// Converts the point cloud to a list of .
+        ///
+        /// The list of .
+        public IEnumerable<Point3D> ToList()
+        {
+            if (this.IsEmpty)
+            {
+                return Enumerable.Empty<Point3D>();
+            }
+
+            var list = new List<Point3D>(this.points.ColumnCount);
+            for (int i = 0; i < this.points.ColumnCount; i++)
+            {
+                list.Add(new (this.points[0, i], this.points[1, i], this.points[2, i]));
+            }
+
+            return list;
+        }
+
+        ///
+        public IEnumerator<Point3D> GetEnumerator()
+        {
+            if (this.points != null)
+            {
+                for (int i = 0; i < this.points.ColumnCount; i++)
+                {
+                    yield return new (this.points[0, i], this.points[1, i], this.points[2, i]);
+                }
+            }
+        }
+
+        ///
+        IEnumerator IEnumerable.GetEnumerator() => this.GetEnumerator();
+    }
+}
\ No newline at end of file
diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Rectangle3D.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Rectangle3D.cs
new file mode 100644
index 000000000..93da9be21
--- /dev/null
+++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Rectangle3D.cs
@@ -0,0 +1,232 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT license.
+
+namespace Microsoft.Psi.Spatial.Euclidean
+{
+    using System;
+    using System.Collections.Generic;
+    using MathNet.Spatial.Euclidean;
+
+    ///
+    /// Represents a 2-dimensional rectangle embedded in 3D space.
+    ///
+    ///
+    /// The rectangle is characterized by its four corner points.
+    ///
+    public readonly struct Rectangle3D : IEquatable<Rectangle3D>
+    {
+        ///
+        /// Gets the top-left corner of the rectangle.
+        ///
+        public readonly Point3D TopLeft;
+
+        ///
+        /// Gets the bottom-left corner of the rectangle.
+        ///
+        public readonly Point3D BottomLeft;
+
+        ///
+        /// Gets the top-right corner of the rectangle.
+        ///
+        public readonly Point3D TopRight;
+
+        ///
+        /// Gets the bottom-right corner of the rectangle.
+        ///
+        public readonly Point3D BottomRight;
+
+        ///
+        /// Gets whether or not this rectangle is degenerate (0-length width or height).
+        ///
+        public readonly bool IsDegenerate;
+
+        ///
+        /// Gets the width.
+        ///
+        public readonly double Width;
+
+        ///
+        /// Gets the height.
+        ///
+        public readonly double Height;
+
+        ///
+        /// Initializes a new instance of the struct.
+        ///
+        /// The origin of the rectangle.
+        /// The horizontal width axis of the rectangle.
+        /// The vertical height axis of the rectangle.
+        /// The left edge of the rectangle (relative to origin along the width axis).
+        /// The bottom edge of the rectangle (relative to origin along the height axis).
+        /// The width of the rectangle (must be positive).
+        /// The height of the rectangle (must be positive).
+        ///
+        /// The edges of the rectangle are aligned to the specified width and height axes, which must be perpendicular.
+ /// + public Rectangle3D( + Point3D origin, + UnitVector3D widthAxis, + UnitVector3D heightAxis, + double left, + double bottom, + double width, + double height) + { + if (!widthAxis.IsPerpendicularTo(heightAxis, 0.001)) + { + throw new ArgumentException("The width and height axes must be perpendicular to each other."); + } + + if (width < 0 || height < 0) + { + throw new ArgumentException("Width and height must be non-negative values"); + } + + if (width == 0 || height == 0) + { + this.IsDegenerate = true; + } + else + { + this.IsDegenerate = false; + } + + this.Width = width; + this.Height = height; + this.BottomLeft = origin + widthAxis.ScaleBy(left) + heightAxis.ScaleBy(bottom); + var widthVector = widthAxis.ScaleBy(width); + var heightVector = heightAxis.ScaleBy(height); + this.BottomRight = this.BottomLeft + widthVector; + this.TopLeft = this.BottomLeft + heightVector; + this.TopRight = this.TopLeft + widthVector; + } + + /// + /// Returns a value indicating whether the specified rectangles are the same. + /// + /// The first rectangle. + /// The second rectangle. + /// True if the rectangles are the same; otherwise false. + public static bool operator ==(Rectangle3D left, Rectangle3D right) => left.Equals(right); + + /// + /// Returns a value indicating whether the specified rectangles are different. + /// + /// The first rectangle. + /// The second rectangle. + /// True if the rectangles are different; otherwise false. + public static bool operator !=(Rectangle3D left, Rectangle3D right) => !left.Equals(right); + + /// + /// Gets the center of the 3D rectangle. + /// + /// The center point of the rectangle. + public Point3D GetCenter() + => new ( + (this.TopLeft.X + this.BottomRight.X) / 2, + (this.TopLeft.Y + this.BottomRight.Y) / 2, + (this.TopLeft.Z + this.BottomRight.Z) / 2); + + /// + /// Gets the corners of the rectangle. + /// + /// An enumeration containing the rectangle corners. + public IEnumerable GetCorners() + { + yield return this.TopLeft; + yield return this.TopRight; + yield return this.BottomRight; + yield return this.BottomLeft; + } + + /// + /// Computes the intersection between a 3D ray and this planar rectangle. + /// + /// The 3D ray. + /// An optional tolerance to account for floating point errors. + /// The intersection point, if one exists. + public Point3D? IntersectionWith(Ray3D ray3D, double tolerance = 1E-10) + { + // compute the plane of the rectangle from three corner points + var plane = Plane.FromPoints(this.TopLeft, this.TopRight, this.BottomRight); + + // compute the intersection of the ray with the plane of the rectangle + var intersection = plane.IntersectionWith(ray3D, tolerance); + + // check whether the intersection is in the direction the ray is pointing + // or in the opposite direction. + if ((intersection - ray3D.ThroughPoint).DotProduct(ray3D.Direction) < 0) + { + // if the intersection is in the opposite direction, return null, + // as this method should compute only intersection points "forward" + // on the ray. + return null; + } + + return this.Contains(intersection, tolerance) ? intersection : null; + } + + /// + /// Gets the in which the lies. + /// + /// The in which the lies. + public Plane GetPlane() + => Plane.FromPoints(this.TopLeft, this.TopRight, this.BottomLeft); + + /// + /// Determines whether the rectangle contains a specified point. + /// + /// The point. + /// An optional tolerance to account for floating point errors. + /// True if the rectangle contains the specified point, false otherwise. 
+ public bool Contains(Point3D point3D, double tolerance = 1E-10) + { + // Check first that the point is in the plane + var distance = this.GetPlane().AbsoluteDistanceTo(point3D); + + if (distance > tolerance) + { + return false; + } + + // Construct a width vector pointing left-to-right and a height vector pointing top-to-bottom (rooted in the top-left corner), + var widthVector = this.TopRight - this.TopLeft; + var heightVector = this.BottomLeft - this.TopLeft; + + // Construct a vector pointing from the top-left corner to the intersection point. + // If the projection of this vector to two of the sides (width and height) are within + // the bounds of each edge, then the point is inside the rectangle. + // (0 <= c.w <= w.w) && (0 <= c.h <= h.h) + var cornerToPoint = point3D - this.TopLeft; + var widthVectorProjection = cornerToPoint.DotProduct(widthVector); + var heightVectorProjection = cornerToPoint.DotProduct(heightVector); + + if (widthVectorProjection >= 0 && widthVectorProjection <= widthVector.DotProduct(widthVector) && + heightVectorProjection >= 0 && heightVectorProjection <= heightVector.DotProduct(heightVector)) + { + return true; + } + else + { + return false; + } + } + + /// + public bool Equals(Rectangle3D other) => + this.BottomLeft == other.BottomLeft && + this.BottomRight == other.BottomRight && + this.TopLeft == other.TopLeft && + this.TopRight == other.TopRight; + + /// + public override bool Equals(object obj) => obj is Rectangle3D other && this.Equals(other); + + /// + public override int GetHashCode() => HashCode.Combine( + this.BottomRight, + this.BottomLeft, + this.TopRight, + this.TopLeft); + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Voxel.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Voxel.cs new file mode 100644 index 000000000..9def54fb3 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/Voxel.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using MathNet.Spatial.Euclidean; + + /// + /// Represents a voxel carrying data of a specified type. + /// + /// The type of the voxel data. + public class Voxel + { + /// + /// Initializes a new instance of the class. + /// + /// The voxel index. + /// The voxel value. + /// The voxel size. + internal Voxel((int X, int Y, int Z) index, T value, double voxelSize) + { + this.Index = index; + this.Value = value; + this.VoxelSize = voxelSize; + } + + /// + /// Gets the voxel index. + /// + public (int X, int Y, int Z) Index { get; } + + /// + /// Gets or sets the voxel value. + /// + public T Value { get; set; } + + /// + /// Gets the voxel size. + /// + public double VoxelSize { get; } + + /// + /// Gets the center point of the voxel. + /// + /// The center point of the voxel. + public Point3D GetCenter() + => new ( + (this.Index.X + 0.5) * this.VoxelSize, + (this.Index.Y + 0.5) * this.VoxelSize, + (this.Index.Z + 0.5) * this.VoxelSize); + + /// + /// Gets the bounds of the voxel. + /// + /// The bounds of the voxel. 
+ public Bounds3D GetBounds3D() + => new ( + this.Index.X * this.VoxelSize, + (this.Index.X + 1) * this.VoxelSize, + this.Index.Y * this.VoxelSize, + (this.Index.Y + 1) * this.VoxelSize, + this.Index.Z * this.VoxelSize, + (this.Index.Z + 1) * this.VoxelSize); + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/VoxelGrid.cs b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/VoxelGrid.cs new file mode 100644 index 000000000..b7bfb3849 --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/VoxelGrid.cs @@ -0,0 +1,148 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Spatial.Euclidean +{ + using System; + using System.Collections; + using System.Collections.Generic; + using System.Linq; + using MathNet.Spatial.Euclidean; + + /// + /// Represents a voxel grid, with each voxel containing data of a specified type. + /// + /// The type of data in the voxel. + public class VoxelGrid : IEnumerable> + { + // The collection of voxels, stored as a dictionary for fast access + private readonly Dictionary<(int X, int Y, int Z), Voxel> voxels = new (); + + /// + /// Initializes a new instance of the class. + /// + /// The voxel size. + public VoxelGrid(double voxelSize) + { + if (voxelSize <= 0) + { + throw new ArgumentException("Voxel size must be strictly positive.", nameof(voxelSize)); + } + + this.VoxelSize = voxelSize; + } + + /// + /// Gets the voxel size in meters. + /// + public double VoxelSize { get; } + + /// + /// Gets the voxel for a specified index. + /// + /// The index. + /// The voxel. + public Voxel this[(int x, int y, int z) index] + { + get + { + if (!this.voxels.ContainsKey(index)) + { + throw new ArgumentOutOfRangeException("No voxel is available at the specified index."); + } + + return this.voxels[index]; + } + } + + /// + /// Gets the voxel for a specified set of coordinates. + /// + /// The x coordinate. + /// The y coordinate. + /// The z coordinate. + /// The voxel. + public Voxel this[double x, double y, double z] => this[this.GetIndex(x, y, z)]; + + /// + /// Gets the voxel for a specified 3D point. + /// + /// The 3D point. + /// The voxel. + public Voxel this[Point3D point3D] => this[this.GetIndex(point3D)]; + + /// + /// Gets the voxel index for a specified set of coordinates. + /// + /// The x coordinate. + /// The y coordinate. + /// The z coordinate. + /// The corresponding voxel index. + public (int X, int Y, int Z) GetIndex(double x, double y, double z) + => ((int)Math.Floor(x / this.VoxelSize), (int)Math.Floor(y / this.VoxelSize), (int)Math.Floor(z / this.VoxelSize)); + + /// + /// Gets the voxel index for a specified 3D point. + /// + /// The specified 3D point. + /// The corresponding voxel index. + public (int X, int Y, int Z) GetIndex(Point3D point3D) + => this.GetIndex(point3D.X, point3D.Y, point3D.Z); + + /// + /// Indicates whether the voxel grid contains a voxel at a specified index. + /// + /// The specified index. + /// True if the voxel grid contains a voxel at the specified index. + public bool Contains((int X, int Y, int Z) index) + => this.voxels.ContainsKey(index); + + /// + /// Indicates whether the voxel grid covers a specified 3D point. + /// + /// The 3D point. + /// True if the voxel grid covers the specified 3D point. + public bool Contains(Point3D point3D) + => this.voxels.ContainsKey(this.GetIndex(point3D)); + + /// + /// Adds a voxel to the grid at the specified index, with a specified value. + /// + /// The voxel index. 
+ /// The voxel value. + public void Add((int X, int Y, int Z) index, T value) + => this.voxels.Add(index, new Voxel(index, value, this.VoxelSize)); + + /// + /// Removes a set of voxels by a specified predicate on the voxel data. + /// + /// The predicate. + public void Remove(Predicate predicate) => this.Remove(v => predicate(v.Value)); + + /// + /// Removes a set of voxels by a specified predicate on the voxel index. + /// + /// The predicate. + public void Remove(Predicate<(int X, int Y, int Z)> predicate) => this.Remove(v => predicate(v.Index)); + + /// + /// Removes a set of voxels by a specified predicate on the voxel. + /// + /// The predicate. + public void Remove(Predicate> predicate) + { + var keysToRemove = this.voxels.Where(kvp => predicate(kvp.Value)).Select(kvp => kvp.Key).ToArray(); + + foreach (var key in keysToRemove) + { + this.voxels.Remove(key); + } + } + + /// + public IEnumerator> GetEnumerator() => this.voxels.Values.GetEnumerator(); + + /// + IEnumerator IEnumerable.GetEnumerator() => this.GetEnumerator(); + } +} diff --git a/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/stylecop.json b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/stylecop.json new file mode 100644 index 000000000..6f09427eb --- /dev/null +++ b/Sources/Spatial/Microsoft.Psi.Spatial.Euclidean/stylecop.json @@ -0,0 +1,16 @@ +{ + // ACTION REQUIRED: This file was automatically added to your project, but it + // will not take effect until additional steps are taken to enable it. See the + // following page for additional information: + // + // https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/EnableConfiguration.md + + "$schema": "https://raw.githubusercontent.com/DotNetAnalyzers/StyleCopAnalyzers/master/StyleCop.Analyzers/StyleCop.Analyzers/Settings/stylecop.schema.json", + "settings": { + "documentationRules": { + "companyName": "Microsoft Corporation", + "copyrightText": "Copyright (c) Microsoft Corporation. All rights reserved.\nLicensed under the MIT license.", + "xmlHeader": false + } + } +} \ No newline at end of file diff --git a/Sources/Speech/Microsoft.Psi.Speech.Windows/SystemSpeechRecognizer.cs b/Sources/Speech/Microsoft.Psi.Speech.Windows/SystemSpeechRecognizer.cs index a9cf90f5f..fbe0b382d 100644 --- a/Sources/Speech/Microsoft.Psi.Speech.Windows/SystemSpeechRecognizer.cs +++ b/Sources/Speech/Microsoft.Psi.Speech.Windows/SystemSpeechRecognizer.cs @@ -496,7 +496,7 @@ private void OnLoadGrammarCompleted(object sender, LoadGrammarCompletedEventArgs /// originating times. /// /// The type of the output stream. - /// The pipeline in which this component was created. + /// The pipeline to add the component to. /// The name of the stream. /// The group in which to create the stream. /// The newly created emitter for the stream. 
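To illustrate how the new Microsoft.Psi.Spatial.Euclidean types above fit together, here is a minimal usage sketch; it is illustrative only and not part of the patch. The BuildOccupancyGrid name is made up, the inputs (a Shared<DepthImage>, an ICameraIntrinsics, and a Box3D region of interest) are assumed to come from elsewhere, and the 0.001 scale factor assumes depth values expressed in millimeters.

// Illustrative sketch only (not part of this patch). Composes PointCloud3D,
// VoxelGrid and Voxel as introduced in the files added above.
public static VoxelGrid<int> BuildOccupancyGrid(
    Shared<DepthImage> depthImage, ICameraIntrinsics intrinsics, Box3D regionOfInterest)
{
    // Back-project the depth image into a 3D point cloud, sampling every 4th pixel
    // and keeping only robust points (neighboring depth estimates are non-zero).
    var cloud = PointCloud3D.FromDepthImage(
        depthImage, intrinsics, scaleFactor: 0.001, sparsity: 4, robustPointsOnly: true);

    // Keep only the points that fall inside the region of interest.
    var roiCloud = cloud.IntersectionWith(regionOfInterest);

    // Accumulate the surviving points into a 5 cm occupancy grid.
    var grid = new VoxelGrid<int>(0.05);
    foreach (var point in roiCloud)
    {
        var index = grid.GetIndex(point);
        if (grid.Contains(index))
        {
            grid[index].Value++;
        }
        else
        {
            grid.Add(index, 1);
        }
    }

    return grid;
}

The same pattern extends to ray-based queries via PointCloud3D.ClosestPointTo and Rectangle3D.IntersectionWith.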
diff --git a/Sources/Tools/PsiStoreTool/PsiStoreTool.csproj b/Sources/Tools/PsiStoreTool/PsiStoreTool.csproj index 0c82c92f3..eae7a77ca 100644 --- a/Sources/Tools/PsiStoreTool/PsiStoreTool.csproj +++ b/Sources/Tools/PsiStoreTool/PsiStoreTool.csproj @@ -41,6 +41,7 @@ + diff --git a/Sources/Tools/PsiStoreTool/Utility.cs b/Sources/Tools/PsiStoreTool/Utility.cs index 7c5ad9e79..0590bce7a 100644 --- a/Sources/Tools/PsiStoreTool/Utility.cs +++ b/Sources/Tools/PsiStoreTool/Utility.cs @@ -320,7 +320,7 @@ void EncodeImageStreams(IStreamMetadata streamInfo, PsiImporter importer, Export { importer .OpenStream>(streamInfo.Name) - .ToPixelFormat(PixelFormat.BGRA_32bpp) + .Convert(PixelFormat.BGRA_32bpp) .EncodeJpeg(quality) .Write(streamInfo.Name, exporter, true); } diff --git a/Sources/Tools/PsiStudio/Microsoft.Psi.PsiStudio/MainWindow.xaml b/Sources/Tools/PsiStudio/Microsoft.Psi.PsiStudio/MainWindow.xaml index 3b3318130..7a8db6b10 100644 --- a/Sources/Tools/PsiStudio/Microsoft.Psi.PsiStudio/MainWindow.xaml +++ b/Sources/Tools/PsiStudio/Microsoft.Psi.PsiStudio/MainWindow.xaml @@ -26,10 +26,10 @@ - - - - + + + + @@ -71,6 +71,9 @@ + + + @@ -389,6 +392,9 @@ + @@ -426,22 +432,25 @@ - + + + + - + - + - + + + + diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/CanvasVisualizationPanelView.xaml.cs b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/CanvasVisualizationPanelView.xaml.cs new file mode 100644 index 000000000..36fc495ae --- /dev/null +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/CanvasVisualizationPanelView.xaml.cs @@ -0,0 +1,103 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Visualization.Views +{ + using System.Collections.Generic; + using System.Windows.Controls; + using GalaSoft.MvvmLight.CommandWpf; + using Microsoft.Psi.Visualization.Helpers; + using Microsoft.Psi.Visualization.VisualizationPanels; + + /// + /// Interaction logic for CanvasVisualizationPanelView.xaml. + /// + public partial class CanvasVisualizationPanelView : VisualizationPanelView + { + /// + /// Initializes a new instance of the class. + /// + public CanvasVisualizationPanelView() + { + this.InitializeComponent(); + } + + /// + /// Gets the visualization panel. 
+ /// + protected CanvasVisualizationPanel VisualizationPanel => (CanvasVisualizationPanel)this.DataContext; + + /// + public override void AppendContextMenuItems(List menuItems) + { + // Add Set Cursor Epsilon menu with sub-menu items + var setCursorEpsilonMenuItem = MenuItemHelper.CreateMenuItem( + string.Empty, + "Set Cursor Epsilon (on All Visualizers)", + null, + this.VisualizationPanel.VisualizationObjects.Count > 0); + + setCursorEpsilonMenuItem.Items.Add( + MenuItemHelper.CreateMenuItem( + null, + "Infinite Past", + new RelayCommand( + () => + { + foreach (var visualizationObject in this.VisualizationPanel.VisualizationObjects) + { + visualizationObject.CursorEpsilonNegMs = int.MaxValue; + visualizationObject.CursorEpsilonPosMs = 0; + } + }), + true)); + setCursorEpsilonMenuItem.Items.Add( + MenuItemHelper.CreateMenuItem( + null, + "Last 5 seconds", + new RelayCommand( + () => + { + foreach (var visualizationObject in this.VisualizationPanel.VisualizationObjects) + { + visualizationObject.CursorEpsilonNegMs = 5000; + visualizationObject.CursorEpsilonPosMs = 0; + } + }), + true)); + setCursorEpsilonMenuItem.Items.Add( + MenuItemHelper.CreateMenuItem( + null, + "Last 1 second", + new RelayCommand( + () => + { + foreach (var visualizationObject in this.VisualizationPanel.VisualizationObjects) + { + visualizationObject.CursorEpsilonNegMs = 1000; + visualizationObject.CursorEpsilonPosMs = 0; + } + }), + true)); + setCursorEpsilonMenuItem.Items.Add( + MenuItemHelper.CreateMenuItem( + null, + "Last 50 milliseconds", + new RelayCommand( + () => + { + foreach (var visualizationObject in this.VisualizationPanel.VisualizationObjects) + { + visualizationObject.CursorEpsilonNegMs = 50; + visualizationObject.CursorEpsilonPosMs = 0; + } + }), + true)); + + menuItems.Add(setCursorEpsilonMenuItem); + menuItems.Add(null); + + base.AppendContextMenuItems(menuItems); + } + } +} diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/IContextMenuItemsSource.cs b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/IContextMenuItemsSource.cs index 84f663130..7bbd529e6 100644 --- a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/IContextMenuItemsSource.cs +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/IContextMenuItemsSource.cs @@ -4,7 +4,6 @@ namespace Microsoft.Psi.Visualization.Views { using System.Collections.Generic; - using System.Windows; using System.Windows.Controls; /// diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationContainerView.xaml b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationContainerView.xaml index d3e62f8ad..d29460a53 100644 --- a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationContainerView.xaml +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationContainerView.xaml @@ -8,6 +8,7 @@ xmlns:cmd="http://www.galasoft.ch/mvvmlight" xmlns:i="clr-namespace:System.Windows.Interactivity;assembly=System.Windows.Interactivity" xmlns:views="clr-namespace:Microsoft.Psi.Visualization.Views" + xmlns:visconv="clr-namespace:Microsoft.Psi.Visualization.Converters" Height="{Binding Height}" IsHitTestVisible="True"> @@ -16,9 +17,10 @@ + - + @@ -45,6 +47,7 @@ + + + diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationContainerView.xaml.cs 
b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationContainerView.xaml.cs index 581c4639b..3a266d211 100644 --- a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationContainerView.xaml.cs +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationContainerView.xaml.cs @@ -73,11 +73,15 @@ private HitTestResultBehavior ContextMenuHitTestResult(HitTestResult result) DependencyObject dependencyObject = result.VisualHit; while (dependencyObject != null) { - if (dependencyObject is IContextMenuItemsSource contextMenuItemsSource && contextMenuItemsSource.ContextMenuItemsSourceType == ContextMenuItemsSourceType.VisualizationPanel) + // If the dependency object is not a hidden panel + if (!(dependencyObject is VisualizationPanelView visualizationPanelView && !(visualizationPanelView.DataContext as VisualizationPanel).IsShown)) { - // Get the visualization panel related to the visualization panel view - this.mouseOverVisualizationPanel = (contextMenuItemsSource as VisualizationPanelView).DataContext as VisualizationPanel; - return HitTestResultBehavior.Stop; + if (dependencyObject is IContextMenuItemsSource contextMenuItemsSource && contextMenuItemsSource.ContextMenuItemsSourceType == ContextMenuItemsSourceType.VisualizationPanel) + { + // Get the visualization panel related to the visualization panel view + this.mouseOverVisualizationPanel = (contextMenuItemsSource as VisualizationPanelView).DataContext as VisualizationPanel; + return HitTestResultBehavior.Stop; + } } dependencyObject = VisualTreeHelper.GetParent(dependencyObject); @@ -127,27 +131,38 @@ private void ResizeChildVisualizationPanels() foreach (var panel in instantVisualizationContainer.Panels) { - if (panel is XYVisualizationPanel visualizationPanelXY) - { - totalWidth += visualizationPanelXY.RelativeWidth; - } - else if (panel is XYZVisualizationPanel visualizationPanelXYZ) - { - totalWidth += visualizationPanelXYZ.RelativeWidth; - } - else if (panel is InstantVisualizationPlaceholderPanel instantVisualizationPlaceholderPanel) + if (panel.Visible) { - totalWidth += instantVisualizationPlaceholderPanel.RelativeWidth; - } - else - { - throw new Exception("Encountered an unsupported panel type."); + if (panel is CanvasVisualizationPanel visualizationPanelCanvas) + { + totalWidth += visualizationPanelCanvas.RelativeWidth; + } + else if (panel is XYVisualizationPanel visualizationPanelXY) + { + totalWidth += visualizationPanelXY.RelativeWidth; + } + else if (panel is XYZVisualizationPanel visualizationPanelXYZ) + { + totalWidth += visualizationPanelXYZ.RelativeWidth; + } + else if (panel is InstantVisualizationPlaceholderPanel instantVisualizationPlaceholderPanel) + { + totalWidth += instantVisualizationPlaceholderPanel.RelativeWidth; + } + else + { + throw new Exception("Encountered an unsupported panel type."); + } } } foreach (var panel in instantVisualizationContainer.Panels) { - if (panel is XYVisualizationPanel visualizationPanelXY) + if (panel is CanvasVisualizationPanel visualizationPanelCanvas) + { + visualizationPanelCanvas.Width = visualizationPanelCanvas.RelativeWidth * this.ActualWidth / totalWidth; + } + else if (panel is XYVisualizationPanel visualizationPanelXY) { visualizationPanelXY.Width = visualizationPanelXY.RelativeWidth * this.ActualWidth / totalWidth; } @@ -167,19 +182,20 @@ private void ResizeChildVisualizationPanels() } } - private void Root_MouseMove(object sender, MouseEventArgs e) + private void 
ReorderThumb_MouseMove(object sender, MouseEventArgs e) { // If the user has the Left Mouse button pressed, and we're not near the bottom edge // of the panel (where resizing occurs), then initiate a Drag & Drop reorder operation - Point mousePosition = e.GetPosition(this); + var mousePosition = e.GetPosition(this); + if (e.LeftButton == MouseButtonState.Pressed && !DragDropHelper.MouseNearPanelBottomEdge(mousePosition, this.ActualHeight)) { - DataObject data = new DataObject(); + var data = new DataObject(); data.SetData(DragDropDataName.DragDropOperation, DragDropOperation.ReorderPanel); data.SetData(DragDropDataName.VisualizationPanel, this.VisualizationPanel); data.SetData(DragDropDataName.MouseOffsetFromTop, mousePosition.Y); data.SetData(DragDropDataName.PanelSize, new Size?(new Size(this.ActualWidth, this.ActualHeight))); - RenderTargetBitmap renderTargetBitmap = new RenderTargetBitmap((int)this.ActualWidth, (int)this.ActualHeight, 96, 96, PixelFormats.Pbgra32); + var renderTargetBitmap = new RenderTargetBitmap((int)this.ActualWidth, (int)this.ActualHeight, 96, 96, PixelFormats.Pbgra32); renderTargetBitmap.Render(this); data.SetImage(renderTargetBitmap); diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationPlaceholderPanelView.xaml b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationPlaceholderPanelView.xaml index 9527ff065..1b440fb6f 100644 --- a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationPlaceholderPanelView.xaml +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/InstantVisualizationPlaceholderPanelView.xaml @@ -57,7 +57,7 @@ VerticalAlignment="Top" HorizontalAlignment="Right" Margin="4,4,4,4"> - + + + + public partial class TimelineVisualizationPanelView : VisualizationPanelView { - private Point lastMousePosition = new Point(0, 0); + private Point lastMousePosition = new (0, 0); private DragOperation currentDragOperation = DragOperation.None; /// @@ -54,6 +54,15 @@ public override void AppendContextMenuItems(List menuItems) IconSourcePath.Legend, timelineVisualizationPanel.ShowLegend ? 
$"Hide Legend" : $"Show Legend", timelineVisualizationPanel.ShowHideLegendCommand)); + + menuItems.Add(MenuItemHelper.CreateMenuItem( + null, + "Auto-Fit Axes", + this.VisualizationPanel.SetAutoAxisComputeModeCommand, + null, + this.VisualizationPanel.AxisComputeMode == AxisComputeMode.Manual)); + + menuItems.Add(null); } base.AppendContextMenuItems(menuItems); @@ -100,7 +109,7 @@ private void Root_MouseMove(object sender, MouseEventArgs e) switch (this.currentDragOperation) { case DragOperation.None: - this.BeginDragOperation(mousePosition); + this.BeginDragOperation(); break; case DragOperation.TimelineScroll: this.DoDragTimeline(mousePosition); @@ -114,41 +123,36 @@ private void Root_MouseMove(object sender, MouseEventArgs e) } this.lastMousePosition = mousePosition; - e.Handled = true; } - private void BeginDragOperation(Point mousePosition) + private void ReorderThumb_MouseMove(object sender, MouseEventArgs e) { - // If the mouse moved mostly horizontally, then we'll begin a timeline scroll - // operation, otherwise we'll begin a Visualization Panel reorder operation - if (this.IsHorizontalDrag(mousePosition)) + // If the user has the Left Mouse button pressed, and we're not near the bottom edge + // of the panel (where resizing occurs), then initiate a Drag & Drop reorder operation + var mousePosition = e.GetPosition(this); + + if (e.LeftButton == MouseButtonState.Pressed && !DragDropHelper.MouseNearPanelBottomEdge(mousePosition, this.ActualHeight)) { - // Only drag the timeline if the navigator is currently paused - if (VisualizationContext.Instance.VisualizationContainer.Navigator.CursorMode == CursorMode.Manual) - { - this.currentDragOperation = DragOperation.TimelineScroll; - this.Cursor = Cursors.Hand; - } + var data = new DataObject(); + data.SetData(DragDropDataName.DragDropOperation, DragDropOperation.ReorderPanel); + data.SetData(DragDropDataName.VisualizationPanel, this.VisualizationPanel); + data.SetData(DragDropDataName.MouseOffsetFromTop, mousePosition.Y); + data.SetData(DragDropDataName.PanelSize, new Size?(new Size(this.ActualWidth, this.ActualHeight))); + var renderTargetBitmap = new RenderTargetBitmap((int)this.ActualWidth, (int)this.ActualHeight, 96, 96, PixelFormats.Pbgra32); + renderTargetBitmap.Render(this); + data.SetImage(renderTargetBitmap); + + DragDrop.DoDragDrop(this, data, DragDropEffects.Move); } - else + } + + private void BeginDragOperation() + { + // Only drag the timeline if the navigator is currently paused + if (VisualizationContext.Instance.VisualizationContainer.Navigator.CursorMode == CursorMode.Manual) { - if (!DragDropHelper.MouseNearPanelBottomEdge(mousePosition, this.ActualHeight)) - { - this.currentDragOperation = DragOperation.PanelReorder; - - DataObject data = new DataObject(); - data.SetData(DragDropDataName.DragDropOperation, DragDropOperation.ReorderPanel); - data.SetData(DragDropDataName.VisualizationPanel, this.VisualizationPanel); - data.SetData(DragDropDataName.VisualizationPanelView, this); - data.SetData(DragDropDataName.MouseOffsetFromTop, mousePosition.Y); - data.SetData(DragDropDataName.PanelSize, new Size?(new Size(this.ActualWidth, this.ActualHeight))); - RenderTargetBitmap renderTargetBitmap = new RenderTargetBitmap((int)this.ActualWidth, (int)this.ActualHeight, 96, 96, PixelFormats.Pbgra32); - renderTargetBitmap.Render(this); - data.SetImage(renderTargetBitmap); - - DragDrop.DoDragDrop(this, data, DragDropEffects.Move); - this.Cursor = Cursors.Hand; - } + this.currentDragOperation = DragOperation.TimelineScroll; + 
this.Cursor = Cursors.Hand; } } @@ -162,13 +166,5 @@ private void DoDragTimeline(Point mousePosition) // Scroll the view viewRange.ScrollBy(-timeMoved); } - - private bool IsHorizontalDrag(Point mousePosition) - { - // Users will most likely be wanting to scroll the panel horizontally much more often - // than they'll re-order the panels, so only call this a Vertical drag if the Y mouse - // movement is at least 3 times the X mouse movement. - return 3 * Math.Abs(mousePosition.X - this.lastMousePosition.X) > Math.Abs(mousePosition.Y - this.lastMousePosition.Y); - } } } diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/VisualizationContainerView.xaml b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/VisualizationContainerView.xaml index e99000a52..684ec0e39 100644 --- a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/VisualizationContainerView.xaml +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/VisualizationContainerView.xaml @@ -72,6 +72,7 @@ - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + + + + + + + + + + + + + diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/XYVisualizationPanelView.xaml.cs b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/XYVisualizationPanelView.xaml.cs index 7897d3d7f..9d0171b5a 100644 --- a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/XYVisualizationPanelView.xaml.cs +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/XYVisualizationPanelView.xaml.cs @@ -3,6 +3,10 @@ namespace Microsoft.Psi.Visualization.Views { + using System.Collections.Generic; + using System.Windows.Controls; + using GalaSoft.MvvmLight.CommandWpf; + using Microsoft.Psi.Visualization.Helpers; using Microsoft.Psi.Visualization.VisualizationPanels; /// @@ -22,5 +26,86 @@ public XYVisualizationPanelView() /// Gets the visualization panel. 
/// protected XYVisualizationPanel VisualizationPanel => (XYVisualizationPanel)this.DataContext; + + /// + public override void AppendContextMenuItems(List menuItems) + { + menuItems.Add(MenuItemHelper.CreateMenuItem( + null, + "Auto-Fit Axes", + this.VisualizationPanel.SetAutoAxisComputeModeCommand, + null, + this.VisualizationPanel.AxisComputeMode == AxisComputeMode.Manual)); + + // Add Set Cursor Epsilon menu with sub-menu items + menuItems.Add(null); + var setCursorEpsilonMenuItem = MenuItemHelper.CreateMenuItem( + string.Empty, + "Set Cursor Epsilon (on All Visualizers)", + null, + this.VisualizationPanel.VisualizationObjects.Count > 0); + + setCursorEpsilonMenuItem.Items.Add( + MenuItemHelper.CreateMenuItem( + null, + "Infinite Past", + new RelayCommand( + () => + { + foreach (var visualizationObject in this.VisualizationPanel.VisualizationObjects) + { + visualizationObject.CursorEpsilonNegMs = int.MaxValue; + visualizationObject.CursorEpsilonPosMs = 0; + } + }), + true)); + setCursorEpsilonMenuItem.Items.Add( + MenuItemHelper.CreateMenuItem( + null, + "Last 5 seconds", + new RelayCommand( + () => + { + foreach (var visualizationObject in this.VisualizationPanel.VisualizationObjects) + { + visualizationObject.CursorEpsilonNegMs = 5000; + visualizationObject.CursorEpsilonPosMs = 0; + } + }), + true)); + setCursorEpsilonMenuItem.Items.Add( + MenuItemHelper.CreateMenuItem( + null, + "Last 1 second", + new RelayCommand( + () => + { + foreach (var visualizationObject in this.VisualizationPanel.VisualizationObjects) + { + visualizationObject.CursorEpsilonNegMs = 1000; + visualizationObject.CursorEpsilonPosMs = 0; + } + }), + true)); + setCursorEpsilonMenuItem.Items.Add( + MenuItemHelper.CreateMenuItem( + null, + "Last 50 milliseconds", + new RelayCommand( + () => + { + foreach (var visualizationObject in this.VisualizationPanel.VisualizationObjects) + { + visualizationObject.CursorEpsilonNegMs = 50; + visualizationObject.CursorEpsilonPosMs = 0; + } + }), + true)); + + menuItems.Add(setCursorEpsilonMenuItem); + menuItems.Add(null); + + base.AppendContextMenuItems(menuItems); + } } } diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/XYZVisualizationPanelView.xaml b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/XYZVisualizationPanelView.xaml index d25a1c5e2..f6bc47580 100644 --- a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/XYZVisualizationPanelView.xaml +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Views/XYZVisualizationPanelView.xaml @@ -25,7 +25,21 @@ - + @@ -66,6 +80,30 @@ + + + + + + diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/GetParameterWindow.xaml.cs b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/GetParameterWindow.xaml.cs new file mode 100644 index 000000000..685046e3a --- /dev/null +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/GetParameterWindow.xaml.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +namespace Microsoft.Psi.Visualization.Windows +{ + using System; + using System.ComponentModel; + using System.Windows; + + /// + /// Interaction logic for GetParameterWindow.xaml. + /// + public partial class GetParameterWindow : Window, INotifyPropertyChanged + { + private readonly Func validator; + private bool isValid = false; + + /// + /// Initializes a new instance of the class. + /// + /// The window that owns this window. 
+ /// The initial store name to display. + /// The initial store path to display. + /// An optional validator function. + public GetParameterWindow(Window owner, string parameterName, string initialParameterValue, Func validator = null) + { + this.InitializeComponent(); + + this.Owner = owner; + this.DataContext = this; + + this.ParameterName = parameterName; + this.ParameterValue = initialParameterValue; + this.validator = validator ?? (value => (true, null)); + + this.Validate(); + } + + /// + /// Occurs when a property has changed. + /// + public event PropertyChangedEventHandler PropertyChanged; + + /// + /// Gets or sets the parameter name. + /// + public string ParameterName { get; set; } + + /// + /// Gets or sets the parameter value. + /// + public string ParameterValue { get; set; } + + /// + /// Gets or sets a value indicating whether the dialog values are valid. + /// + public bool IsValid + { + get => this.isValid; + set + { + if (this.isValid != value) + { + this.isValid = value; + if (this.PropertyChanged != null) + { + this.PropertyChanged.Invoke(this, new PropertyChangedEventArgs(nameof(this.IsValid))); + } + } + } + } + + private void OKButton_Click(object sender, RoutedEventArgs e) + { + this.DialogResult = true; + e.Handled = true; + } + + private void ValidateFormValues(object sender, RoutedEventArgs e) + { + // This method is called whenever the text in one of the textboxes changes + this.Validate(); + e.Handled = true; + } + + private void Validate() + { + this.IsValid = this.validator(this.ParameterValue).Item1; + } + } +} \ No newline at end of file diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindow.xaml b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindow.xaml index 3129f18c2..f8ab819fe 100644 --- a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindow.xaml +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindow.xaml @@ -4,17 +4,30 @@ + + + + + @@ -22,26 +35,51 @@ + + + + + + + - - - + + + - diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindow.xaml.cs b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindow.xaml.cs index 5a0804101..2e9123b23 100644 --- a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindow.xaml.cs +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindow.xaml.cs @@ -3,7 +3,6 @@ namespace Microsoft.Psi.Visualization.Windows { - using System; using System.Windows; /// @@ -11,117 +10,37 @@ namespace Microsoft.Psi.Visualization.Windows /// public partial class RunBatchProcessingTaskWindow : Window { - private DateTime startTime = DateTime.MinValue; - /// /// Initializes a new instance of the class. /// /// The owner of this window. - /// The task running. - /// The target object the task is currently running on. - /// A parameter specifying the data size. - public RunBatchProcessingTaskWindow(Window owner, string runningTask, string target, TimeSpan dataSize) + /// The view model for this window. 
+ public RunBatchProcessingTaskWindow(Window owner, RunBatchProcessingTaskWindowViewModel viewModel) { this.InitializeComponent(); this.Owner = owner; - this.TaskName = runningTask; - this.Target = target; - this.DataSizeLabel.Content = dataSize.ToString(); + this.DataContext = viewModel; } - /// - /// Gets or sets the task name. - /// - public string TaskName - { - get { return (string)this.TaskNameLabel.Content; } - set { this.TaskNameLabel.Content = value; } - } + private RunBatchProcessingTaskWindowViewModel ViewModel => this.DataContext as RunBatchProcessingTaskWindowViewModel; - /// - /// Gets or sets the current target of the task. - /// - public string Target + private void RunButtonClick(object sender, RoutedEventArgs e) { - get { return (string)this.TargetLabel.Content; } - set { this.TargetLabel.Content = value; } - } - - /// - /// Gets or sets the progress of the task. - /// - public double Progress - { - get { return this.ProgressBar.Value; } - - set + if (this.ViewModel.Configuration.Validate(out string error)) { - this.PercentCompleteLabel.Content = $"{value:0.0}%"; - this.ProgressBar.Value = value; - - if (this.startTime == DateTime.MinValue) - { - this.startTime = DateTime.UtcNow; - } - else if (value > 0) - { - var progress = value * 0.01; - var elapsedTime = DateTime.UtcNow - this.startTime; - var estimatedRemainingTime = TimeSpan.FromTicks((long)(elapsedTime.Ticks * ((1 - progress) / progress))); - this.ElapsedTimeLabel.Content = this.GetTimeSpanAsFriendlyString(elapsedTime); - this.EstimatedRemainingTimeLabel.Content = "about " + this.GetTimeSpanAsFriendlyString(estimatedRemainingTime); - } - } - } - - private string GetTimeSpanAsFriendlyString(TimeSpan timeSpan) - { - var result = string.Empty; - - if (timeSpan.Days > 1) - { - result += $"{timeSpan.Days} days, "; - } - else if (timeSpan.Days == 1) - { - result += $"{timeSpan.Days} day, "; - } - - if (timeSpan.Hours > 1) - { - result += $"{timeSpan.Hours} hours, "; - } - else if (timeSpan.Hours == 1) - { - result += $"{timeSpan.Hours} hour, "; + this.ViewModel + .RunAsync() + .ContinueWith(_ => Application.Current.Dispatcher.Invoke(() => + { + this.DialogResult = true; + this.Close(); + })); } - - if (timeSpan.Days < 1) + else { - if (timeSpan.Minutes > 1) - { - result += $"{timeSpan.Minutes} minutes, "; - } - else if (timeSpan.Minutes == 1) - { - result += $"{timeSpan.Minutes} minute, "; - } - - if (timeSpan.Hours < 1) - { - if (timeSpan.Seconds > 1 || timeSpan.Seconds == 0) - { - result += $"{timeSpan.Seconds} seconds, "; - } - else if (timeSpan.Seconds == 1) - { - result += $"{timeSpan.Seconds} second, "; - } - } + new MessageBoxWindow(this, "Invalid Configuration", error, cancelButtonText: null).ShowDialog(); } - - return result.EndsWith(", ") ? result.TrimEnd(new char[] { ',', ' ' }) + "." : result; } } } \ No newline at end of file diff --git a/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindowViewModel.cs b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindowViewModel.cs new file mode 100644 index 000000000..23c19b347 --- /dev/null +++ b/Sources/Visualization/Microsoft.Psi.Visualization.Windows/Windows/RunBatchProcessingTaskWindowViewModel.cs @@ -0,0 +1,247 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. 
+
+namespace Microsoft.Psi.Visualization.Windows
+{
+    using System;
+    using System.Linq;
+    using System.Threading.Tasks;
+    using System.Windows;
+    using Microsoft.Psi.Data;
+    using Microsoft.Psi.Visualization.Helpers;
+    using Microsoft.Psi.Visualization.ViewModels;
+    using Microsoft.Psi.Visualization.VisualizationObjects;
+
+    ///
+    /// Implements a view model for the .
+    ///
+    public class RunBatchProcessingTaskWindowViewModel : ObservableObject
+    {
+        private readonly VisualizationContainer visualizationContainer;
+        private readonly DatasetViewModel datasetViewModel;
+        private readonly SessionViewModel sessionViewModel;
+        private readonly BatchProcessingTaskMetadata batchProcessingTaskMetadata;
+        private Visibility configVisibility = Visibility.Visible;
+        private Visibility runningVisibility = Visibility.Collapsed;
+        private string name = null;
+        private string description = null;
+        private string target = null;
+        private string dataSize = null;
+        private double progress = 0;
+        private string percentCompleteAsString = null;
+        private string elapsedTime = null;
+        private string estimatedRemainingTime = null;
+        private BatchProcessingTaskConfiguration configuration = null;
+
+        ///
+        /// Initializes a new instance of the class.
+        ///
+        /// The visualization container.
+        /// The dataset view model.
+        /// The batch processing task metadata.
+        public RunBatchProcessingTaskWindowViewModel(VisualizationContainer visualizationContainer, DatasetViewModel datasetViewModel, BatchProcessingTaskMetadata batchProcessingTaskMetadata)
+        {
+            this.visualizationContainer = visualizationContainer;
+            this.datasetViewModel = datasetViewModel;
+            this.batchProcessingTaskMetadata = batchProcessingTaskMetadata;
+            this.Name = batchProcessingTaskMetadata.Name;
+            this.Description = batchProcessingTaskMetadata.Description;
+            this.Target = datasetViewModel.Name;
+            this.DataSize = TimeSpanFormatHelper.FormatTimeSpanApproximate(
+                new TimeSpan(datasetViewModel.SessionViewModels.Sum(svm => svm.OriginatingTimeInterval.Span.Ticks)));
+            this.Configuration = batchProcessingTaskMetadata.GetDefaultConfiguration();
+        }
+
+        ///
+        /// Initializes a new instance of the class.
+        ///
+        /// The visualization container.
+        /// The session view model.
+        /// The batch processing task metadata.
+        public RunBatchProcessingTaskWindowViewModel(VisualizationContainer visualizationContainer, SessionViewModel sessionViewModel, BatchProcessingTaskMetadata batchProcessingTaskMetadata)
+        {
+            this.visualizationContainer = visualizationContainer;
+            this.sessionViewModel = sessionViewModel;
+            this.batchProcessingTaskMetadata = batchProcessingTaskMetadata;
+            this.Name = batchProcessingTaskMetadata.Name;
+            this.Description = batchProcessingTaskMetadata.Description;
+            this.Target = sessionViewModel.Name;
+            this.DataSize = TimeSpanFormatHelper.FormatTimeSpanApproximate(sessionViewModel.OriginatingTimeInterval.Span);
+            this.Configuration = batchProcessingTaskMetadata.GetDefaultConfiguration();
+        }
+
+        ///
+        /// Gets or sets the task name.
+        ///
+        public string Name
+        {
+            get => this.name;
+            set => this.Set(nameof(this.Name), ref this.name, value);
+        }
+
+        ///
+        /// Gets or sets the task description.
+        ///
+        public string Description
+        {
+            get => this.description;
+            set => this.Set(nameof(this.Description), ref this.description, value);
+        }
+
+        ///
+        /// Gets or sets the current target of the task.
+ /// + public string Target + { + get => this.target; + set => this.Set(nameof(this.Target), ref this.target, value); + } + + /// + /// Gets or sets the data size. + /// + public string DataSize + { + get => this.dataSize; + set => this.Set(nameof(this.DataSize), ref this.dataSize, value); + } + + /// + /// Gets or sets the progress of the task. + /// + public double Progress + { + get => this.progress; + set => this.Set(nameof(this.Progress), ref this.progress, value); + } + + /// + /// Gets or sets the percentage complete as string. + /// + public string PercentageCompleteAsString + { + get => this.percentCompleteAsString; + set => this.Set(nameof(this.PercentageCompleteAsString), ref this.percentCompleteAsString, value); + } + + /// + /// Gets or sets the elapsed time. + /// + public string ElapsedTime + { + get => this.elapsedTime; + set => this.Set(nameof(this.ElapsedTime), ref this.elapsedTime, value); + } + + /// + /// Gets or sets the estimated remaining time. + /// + public string EstimatedRemainingTime + { + get => this.estimatedRemainingTime; + set => this.Set(nameof(this.EstimatedRemainingTime), ref this.estimatedRemainingTime, value); + } + + /// + /// Gets or sets the batch processing task configuration. + /// + public BatchProcessingTaskConfiguration Configuration + { + get => this.configuration; + set => this.Set(nameof(this.Configuration), ref this.configuration, value); + } + + /// + /// Gets or sets the configuration-time visibility. + /// + public Visibility ConfigVisibility + { + get => this.configVisibility; + set => this.Set(nameof(this.ConfigVisibility), ref this.configVisibility, value); + } + + /// + /// Gets or sets the running-time visibility. + /// + public Visibility RunningVisibility + { + get => this.runningVisibility; + set => this.Set(nameof(this.RunningVisibility), ref this.runningVisibility, value); + } + + /// + /// Run the batch processing task. + /// + /// The async threading task that runs the batch processing task. 
+ public async Task RunAsync() + { + this.ConfigVisibility = Visibility.Collapsed; + this.RunningVisibility = Visibility.Visible; + + var startTime = DateTime.MinValue; + + // Unbind any visualizers currently bound to the output store + this.visualizationContainer.UnbindVisualizationObjectsFromStore(this.configuration.OutputStoreName, this.configuration.OutputStorePath, null); + + // Initialize progress reporter for the status window + if (this.datasetViewModel != null) + { + var progress = new Progress<(string, double)>(tuple => + { + if (startTime == DateTime.MinValue) + { + startTime = DateTime.UtcNow; + } + + this.Target = $"{this.datasetViewModel.Name} : {tuple.Item1}"; + this.Progress = tuple.Item2 * 100; + this.PercentageCompleteAsString = $"{this.Progress:0.0}%"; + var elapsedTime = DateTime.UtcNow - startTime; + var estimatedRemainingTime = TimeSpan.FromTicks((long)(elapsedTime.Ticks * ((1 - tuple.Item2) / tuple.Item2))); + this.ElapsedTime = TimeSpanFormatHelper.FormatTimeSpanApproximate(elapsedTime); + this.EstimatedRemainingTime = "about " + TimeSpanFormatHelper.FormatTimeSpanApproximate(estimatedRemainingTime); + }); + + await this.datasetViewModel.Dataset.CreateDerivedPartitionAsync( + (pipeline, sessionImporter, exporter) => this.batchProcessingTaskMetadata.Run(pipeline, sessionImporter, exporter, this.Configuration), + this.Configuration.OutputPartitionName, + overwrite: true, + outputStoreName: this.Configuration.OutputStoreName, + outputStorePath: this.Configuration.OutputStorePath, + replayDescriptor: this.Configuration.ReplayAllRealTime ? ReplayDescriptor.ReplayAllRealTime : ReplayDescriptor.ReplayAll, + deliveryPolicy: this.Configuration.DeliveryPolicyLatestMessage ? DeliveryPolicy.LatestMessage : null, + enableDiagnostics: this.Configuration.EnableDiagnostics, + progress: progress); + } + else + { + var progress = new Progress<(string, double)>(tuple => + { + if (startTime == DateTime.MinValue) + { + startTime = DateTime.UtcNow; + } + + this.Target = tuple.Item1; + this.Progress = tuple.Item2 * 100; + this.PercentageCompleteAsString = $"{this.Progress:0.0}%"; + var elapsedTime = DateTime.UtcNow - startTime; + var estimatedRemainingTime = TimeSpan.FromTicks((long)(elapsedTime.Ticks * ((1 - tuple.Item2) / tuple.Item2))); + this.ElapsedTime = TimeSpanFormatHelper.FormatTimeSpanApproximate(elapsedTime); + this.EstimatedRemainingTime = "about " + TimeSpanFormatHelper.FormatTimeSpanApproximate(estimatedRemainingTime); + }); + + await this.sessionViewModel.Session.CreateDerivedPartitionAsync( + (pipeline, sessionImporter, exporter) => this.batchProcessingTaskMetadata.Run(pipeline, sessionImporter, exporter, this.Configuration), + this.Configuration.OutputPartitionName, + overwrite: true, + outputStoreName: this.Configuration.OutputStoreName, + outputStorePath: this.Configuration.OutputStorePath, + replayDescriptor: this.Configuration.ReplayAllRealTime ? ReplayDescriptor.ReplayAllRealTime : ReplayDescriptor.ReplayAll, + deliveryPolicy: this.Configuration.DeliveryPolicyLatestMessage ? 
+                    enableDiagnostics: this.Configuration.EnableDiagnostics,
+                    progress: progress);
+            }
+        }
+    }
+}
diff --git a/Sources/Visualization/Test.Psi.Visualization/OberservableSortedCollectionUnitTest.cs b/Sources/Visualization/Test.Psi.Visualization/OberservableSortedCollectionUnitTest.cs
index 11d28c022..2a1a65a56 100644
--- a/Sources/Visualization/Test.Psi.Visualization/OberservableSortedCollectionUnitTest.cs
+++ b/Sources/Visualization/Test.Psi.Visualization/OberservableSortedCollectionUnitTest.cs
@@ -6,7 +6,7 @@ namespace Test.Psi.Visualization
     using System;
     using System.Collections.Generic;
     using System.Collections.Specialized;
-    using Microsoft.Psi.Visualization.Collections;
+    using Microsoft.Psi.Visualization;
     using Microsoft.VisualStudio.TestTools.UnitTesting;
 
     /// <summary>
diff --git a/Sources/Visualization/Test.Psi.Visualization/ObservableKeyedCacheUnitTest.cs b/Sources/Visualization/Test.Psi.Visualization/ObservableKeyedCacheUnitTest.cs
index b288b664b..29c962747 100644
--- a/Sources/Visualization/Test.Psi.Visualization/ObservableKeyedCacheUnitTest.cs
+++ b/Sources/Visualization/Test.Psi.Visualization/ObservableKeyedCacheUnitTest.cs
@@ -6,7 +6,7 @@ namespace Test.Psi.Visualization
     using System;
     using System.Collections.Generic;
     using System.Collections.Specialized;
-    using Microsoft.Psi.Visualization.Collections;
+    using Microsoft.Psi.Visualization;
     using Microsoft.VisualStudio.TestTools.UnitTesting;
 
     [TestClass]
diff --git a/Sources/Visualization/Test.Psi.Visualization/Properties/AssemblyInfo.cs b/Sources/Visualization/Test.Psi.Visualization/Properties/AssemblyInfo.cs
index 8f164040b..a7f51db35 100644
--- a/Sources/Visualization/Test.Psi.Visualization/Properties/AssemblyInfo.cs
+++ b/Sources/Visualization/Test.Psi.Visualization/Properties/AssemblyInfo.cs
@@ -10,6 +10,6 @@
 [assembly: AssemblyCopyright("Copyright (C) Microsoft Corporation. All rights reserved.")]
 [assembly: ComVisible(false)]
 [assembly: Guid("7cd463a8-61bb-4937-aa96-02da13e622d0")]
-[assembly: AssemblyVersion("0.15.49.1")]
-[assembly: AssemblyFileVersion("0.15.49.1")]
-[assembly: AssemblyInformationalVersion("0.15.49.1-beta")]
+[assembly: AssemblyVersion("0.16.92.1")]
+[assembly: AssemblyFileVersion("0.16.92.1")]
+[assembly: AssemblyInformationalVersion("0.16.92.1-beta")]
diff --git a/ThirdPartyNotices.txt b/ThirdPartyNotices.txt
index 93dcbae55..f9ebb8c29 100644
--- a/ThirdPartyNotices.txt
+++ b/ThirdPartyNotices.txt
@@ -8,6 +8,7 @@ This project is based on or incorporates material from the projects listed below
 1. Extended.Wpf.Toolkit version 3.1 (https://github.com/xceedsoftware/wpftoolkit)
 2. RoomAlive Toolkit (https://github.com/Microsoft/RoomAliveToolkit)
 3. Microsoft Automatic Graph Layout (https://github.com/microsoft/automatic-graph-layout)
+4. HoloLens2ForCV (https://github.com/microsoft/HoloLens2ForCV)
 
 %% Extended.Wpf.Toolkit NOTICES AND INFORMATION BEGIN HERE
 =========================================
@@ -105,3 +106,30 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 =========================================
 END OF Microsoft Automatic Graph Layout NOTICES AND INFORMATION
+
+
+%% HoloLens2ForCV NOTICES AND INFORMATION BEGIN HERE
+=========================================
+MIT License
+
+Copyright (c) Microsoft Corporation.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE
+=========================================
+END OF HoloLens2ForCV NOTICES AND INFORMATION