diff --git a/.gitignore b/.gitignore
index 03900605..9416d3cc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -233,4 +233,4 @@ readme.txt
**/src/OpenSEE/NUglify.dll
**/src/OpenSEE/web.config.backup
-**/src/OpenSee/scripts/*.js
\ No newline at end of file
+**/src/OpenSee/wwwroot/scripts/*.js
\ No newline at end of file
diff --git a/src/Libraries/FaultAlgorithms/Conductor.cs b/src/Libraries/FaultAlgorithms/Conductor.cs
new file mode 100644
index 00000000..5b5a24fe
--- /dev/null
+++ b/src/Libraries/FaultAlgorithms/Conductor.cs
@@ -0,0 +1,122 @@
+//*********************************************************************************************************************
+// Conductor.cs
+// Version 1.1 and subsequent releases
+//
+// Copyright © 2013, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Version 1.0
+//
+// Copyright 2012 ELECTRIC POWER RESEARCH INSTITUTE, INC. All rights reserved.
+//
+// openFLE ("this software") is licensed under BSD 3-Clause license.
+//
+// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+// following conditions are met:
+//
+// • Redistributions of source code must retain the above copyright notice, this list of conditions and
+// the following disclaimer.
+//
+// • Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
+// the following disclaimer in the documentation and/or other materials provided with the distribution.
+//
+// • Neither the name of the Electric Power Research Institute, Inc. (“EPRI”) nor the names of its contributors
+// may be used to endorse or promote products derived from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL EPRI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+//
+// This software incorporates work covered by the following copyright and permission notice:
+//
+// • TVA Code Library 4.0.4.3 - Tennessee Valley Authority, tvainfo@tva.gov
+// No copyright is claimed pursuant to 17 USC § 105. All Other Rights Reserved.
+//
+// Licensed under TVA Custom License based on NASA Open Source Agreement (TVA Custom NOSA);
+// you may not use TVA Code Library except in compliance with the TVA Custom NOSA. You may
+// obtain a copy of the TVA Custom NOSA at http://tvacodelibrary.codeplex.com/license.
+//
+// TVA Code Library is provided by the copyright holders and contributors "as is" and any express
+// or implied warranties, including, but not limited to, the implied warranties of merchantability
+// and fitness for a particular purpose are disclaimed.
+//
+//*********************************************************************************************************************
+//
+// Code Modification History:
+// -------------------------------------------------------------------------------------------------------------------
+// 06/14/2012 - Stephen C. Wills, Grid Protection Alliance
+// Generated original version of source code.
+//
+//*********************************************************************************************************************
+
+namespace FaultAlgorithms
+{
+    /// <summary>
+    /// Contains data for both the voltage
+    /// and current on a conductor.
+    /// </summary>
+    public class Conductor
+    {
+        #region [ Members ]
+
+        // Fields
+
+        /// <summary>
+        /// One cycle of voltage data.
+        /// </summary>
+        public Cycle V;
+
+        /// <summary>
+        /// One cycle of current data.
+        /// </summary>
+        public Cycle I;
+
+        #endregion
+
+        #region [ Constructors ]
+
+        /// <summary>
+        /// Creates a new instance of the <see cref="Conductor"/> class.
+        /// </summary>
+        public Conductor()
+        {
+            V = new Cycle();
+            I = new Cycle();
+        }
+
+        /// <summary>
+        /// Creates a new instance of the <see cref="Conductor"/> class.
+        /// </summary>
+        /// <param name="cycleIndex">The index of the cycle to be calculated.</param>
+        /// <param name="sampleRateDivisor">The value to divide from the sample rate to determine the starting location of the cycle.</param>
+        /// <param name="frequency">The frequency of the sine wave during this cycle.</param>
+        /// <param name="voltageData">The voltage data points.</param>
+        /// <param name="currentData">The current data points.</param>
+        public Conductor(int cycleIndex, int sampleRateDivisor, double frequency, MeasurementData voltageData, MeasurementData currentData)
+        {
+            int vStart = cycleIndex * (voltageData.SampleRate / sampleRateDivisor); // voltage and current may be sampled at different rates, so each start index is computed from its own channel's rate
+            int iStart = cycleIndex * (currentData.SampleRate / sampleRateDivisor);
+            V = new Cycle(vStart, frequency, voltageData);
+            I = new Cycle(iStart, frequency, currentData);
+        }
+
+        #endregion
+    }
+}
diff --git a/src/Libraries/FaultAlgorithms/Cycle.cs b/src/Libraries/FaultAlgorithms/Cycle.cs
new file mode 100644
index 00000000..91db47e8
--- /dev/null
+++ b/src/Libraries/FaultAlgorithms/Cycle.cs
@@ -0,0 +1,200 @@
+//*********************************************************************************************************************
+// Cycle.cs
+// Version 1.1 and subsequent releases
+//
+// Copyright 2013, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Version 1.0
+//
+// Copyright 2012 ELECTRIC POWER RESEARCH INSTITUTE, INC. All rights reserved.
+//
+// openFLE ("this software") is licensed under BSD 3-Clause license.
+//
+// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+// following conditions are met:
+//
+// Redistributions of source code must retain the above copyright notice, this list of conditions and
+// the following disclaimer.
+//
+// Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
+// the following disclaimer in the documentation and/or other materials provided with the distribution.
+//
+// Neither the name of the Electric Power Research Institute, Inc. (EPRI) nor the names of its contributors
+// may be used to endorse or promote products derived from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL EPRI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+//
+// This software incorporates work covered by the following copyright and permission notice:
+//
+// TVA Code Library 4.0.4.3 - Tennessee Valley Authority, tvainfo@tva.gov
+// No copyright is claimed pursuant to 17 USC 105. All Other Rights Reserved.
+//
+// Licensed under TVA Custom License based on NASA Open Source Agreement (TVA Custom NOSA);
+// you may not use TVA Code Library except in compliance with the TVA Custom NOSA. You may
+// obtain a copy of the TVA Custom NOSA at http://tvacodelibrary.codeplex.com/license.
+//
+// TVA Code Library is provided by the copyright holders and contributors "as is" and any express
+// or implied warranties, including, but not limited to, the implied warranties of merchantability
+// and fitness for a particular purpose are disclaimed.
+//
+//*********************************************************************************************************************
+//
+// Code Modification History:
+// -------------------------------------------------------------------------------------------------------------------
+// 05/23/2012 - J. Ritchie Carroll, Grid Protection Alliance
+// Generated original version of source code.
+//
+//*********************************************************************************************************************
+
+using Gemstone;
+using Gemstone.Numeric;
+using Gemstone.Numeric.Analysis;
+using Gemstone.Units;
+
+namespace FaultAlgorithms
+{
+    /// <summary>
+    /// Represents a cycle of single phase power frequency-domain data.
+    /// </summary>
+    public class Cycle
+    {
+        #region [ Members ]
+
+        // Constants
+        private const double PiOverTwo = Math.PI / 2.0D;
+
+        // Fields
+
+        /// <summary>
+        /// The actual frequency of the cycle in hertz.
+        /// </summary>
+        public double Frequency;
+
+        /// <summary>
+        /// The complex number representation of the RMS phasor.
+        /// </summary>
+        public ComplexNumber Complex;
+
+        /// <summary>
+        /// The most extreme data point in the cycle.
+        /// </summary>
+        public double Peak;
+
+        /// <summary>
+        /// The error between the sine fit and the given data values.
+        /// </summary>
+        public double Error;
+
+        #endregion
+
+        #region [ Constructors ]
+
+        /// <summary>
+        /// Creates a new instance of the <see cref="Cycle"/> class.
+        /// </summary>
+        public Cycle()
+        {
+        }
+
+        /// <summary>
+        /// Creates a new instance of the <see cref="Cycle"/> class.
+        /// </summary>
+        /// <param name="startSample">The index of the start of the cycle.</param>
+        /// <param name="frequency">The frequency of the measured system, in Hz.</param>
+        /// <param name="waveFormData">The time-domain data to be used to calculate frequency-domain values.</param>
+        public Cycle(int startSample, double frequency, MeasurementData waveFormData)
+        {
+            long timeStart;
+            double[] timeInSeconds;
+            double[] measurements;
+            SineWave sineFit;
+
+            if (startSample < 0) // guard: cycle must start within the data
+                throw new ArgumentOutOfRangeException("startSample");
+
+            if (startSample + waveFormData.SampleRate > waveFormData.Times.Length) // one full cycle of time stamps must be available
+                throw new ArgumentOutOfRangeException("startSample");
+
+            if (startSample + waveFormData.SampleRate > waveFormData.Measurements.Length) // one full cycle of measurements must be available
+                throw new ArgumentOutOfRangeException("startSample");
+
+            timeStart = waveFormData.Times[startSample];
+            timeInSeconds = new double[waveFormData.SampleRate];
+            measurements = new double[waveFormData.SampleRate];
+
+            // Copy one cycle of samples, re-basing time so the cycle starts at t = 0
+            for (int i = 0; i < waveFormData.SampleRate; i++)
+            {
+                timeInSeconds[i] = Ticks.ToSeconds(waveFormData.Times[i + startSample] - timeStart);
+                measurements[i] = waveFormData.Measurements[i + startSample];
+            }
+
+            sineFit = WaveFit.SineFit(measurements, timeInSeconds, frequency);
+
+            RMS = Math.Sqrt(measurements.Select(vi => vi * vi).Average()); // root-mean-square of the raw samples
+            Phase = sineFit.Phase - PiOverTwo; // shift sine-fit phase by -90 degrees (sine -> cosine/phasor reference)
+            Peak = sineFit.Amplitude;
+            Frequency = frequency;
+
+            // Sum of absolute residuals between the fitted sine wave and the samples
+            Error = timeInSeconds
+                .Select(time => sineFit.CalculateY(time))
+                .Zip(measurements, (calc, measurement) => Math.Abs(calc - measurement))
+                .Sum();
+        }
+
+        #endregion
+
+        #region [ Properties ]
+
+        /// <summary>
+        /// Root-mean-square of the data points in the cycle.
+        /// </summary>
+        public double RMS
+        {
+            get
+            {
+                return Complex.Magnitude;
+            }
+            set
+            {
+                Complex.Magnitude = value;
+            }
+        }
+
+        /// <summary>
+        /// Phase angle of the start of the cycle, relative to the reference angle.
+        /// </summary>
+        public Angle Phase
+        {
+            get
+            {
+                return Complex.Angle;
+            }
+            set
+            {
+                Complex.Angle = value;
+            }
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
diff --git a/src/Libraries/FaultAlgorithms/CycleData.cs b/src/Libraries/FaultAlgorithms/CycleData.cs
new file mode 100644
index 00000000..610d5960
--- /dev/null
+++ b/src/Libraries/FaultAlgorithms/CycleData.cs
@@ -0,0 +1,177 @@
+//*********************************************************************************************************************
+// CycleData.cs
+// Version 1.1 and subsequent releases
+//
+// Copyright © 2013, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Version 1.0
+//
+// Copyright 2012 ELECTRIC POWER RESEARCH INSTITUTE, INC. All rights reserved.
+//
+// openFLE ("this software") is licensed under BSD 3-Clause license.
+//
+// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+// following conditions are met:
+//
+// • Redistributions of source code must retain the above copyright notice, this list of conditions and
+// the following disclaimer.
+//
+// • Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
+// the following disclaimer in the documentation and/or other materials provided with the distribution.
+//
+// • Neither the name of the Electric Power Research Institute, Inc. (“EPRI”) nor the names of its contributors
+// may be used to endorse or promote products derived from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL EPRI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+//
+// This software incorporates work covered by the following copyright and permission notice:
+//
+// • TVA Code Library 4.0.4.3 - Tennessee Valley Authority, tvainfo@tva.gov
+// No copyright is claimed pursuant to 17 USC § 105. All Other Rights Reserved.
+//
+// Licensed under TVA Custom License based on NASA Open Source Agreement (TVA Custom NOSA);
+// you may not use TVA Code Library except in compliance with the TVA Custom NOSA. You may
+// obtain a copy of the TVA Custom NOSA at http://tvacodelibrary.codeplex.com/license.
+//
+// TVA Code Library is provided by the copyright holders and contributors "as is" and any express
+// or implied warranties, including, but not limited to, the implied warranties of merchantability
+// and fitness for a particular purpose are disclaimed.
+//
+//*********************************************************************************************************************
+//
+// Code Modification History:
+// -------------------------------------------------------------------------------------------------------------------
+// 06/14/2012 - Stephen C. Wills, Grid Protection Alliance
+// Generated original version of source code.
+//
+//*********************************************************************************************************************
+
+using Gemstone.Numeric;
+
+namespace FaultAlgorithms
+{
+    /// <summary>
+    /// Contains data for a single cycle over all three line-to-neutral conductors.
+    /// </summary>
+    public class CycleData
+    {
+        #region [ Members ]
+
+        // Constants
+
+        /// <summary>
+        /// 2 * pi
+        /// </summary>
+        public const double TwoPI = 2.0D * Math.PI;
+
+        // a = e^((2/3) * pi * i), the 120-degree rotation operator used for symmetrical components
+        private const double Rad120 = TwoPI / 3.0D;
+        private static readonly ComplexNumber a = new ComplexNumber(Math.Cos(Rad120), Math.Sin(Rad120));
+        private static readonly ComplexNumber aSq = a * a;
+
+        // Fields
+
+        /// <summary>
+        /// A-to-neutral conductor
+        /// </summary>
+        public Conductor AN;
+
+        /// <summary>
+        /// B-to-neutral conductor
+        /// </summary>
+        public Conductor BN;
+
+        /// <summary>
+        /// C-to-neutral conductor
+        /// </summary>
+        public Conductor CN;
+
+        /// <summary>
+        /// Timestamp of the start of the cycle.
+        /// </summary>
+        public DateTime StartTime;
+
+        #endregion
+
+        #region [ Constructors ]
+
+        /// <summary>
+        /// Creates a new instance of the <see cref="CycleData"/> class.
+        /// </summary>
+        public CycleData()
+        {
+            AN = new Conductor();
+            BN = new Conductor();
+            CN = new Conductor();
+        }
+
+        /// <summary>
+        /// Creates a new instance of the <see cref="CycleData"/> class.
+        /// </summary>
+        /// <param name="cycleIndex">The index of the cycle being created.</param>
+        /// <param name="sampleRateDivisor">The value to divide from the sample rate to determine the index of the sample at the start of the cycle.</param>
+        /// <param name="frequency">The frequency of the measured system, in Hz.</param>
+        /// <param name="voltageDataSet">The data set containing voltage measurements.</param>
+        /// <param name="currentDataSet">The data set containing current measurements.</param>
+        public CycleData(int cycleIndex, int sampleRateDivisor, double frequency, MeasurementDataSet voltageDataSet, MeasurementDataSet currentDataSet)
+        {
+            int sampleIndex;
+
+            AN = new Conductor(cycleIndex, sampleRateDivisor, frequency, voltageDataSet.AN, currentDataSet.AN);
+            BN = new Conductor(cycleIndex, sampleRateDivisor, frequency, voltageDataSet.BN, currentDataSet.BN);
+            CN = new Conductor(cycleIndex, sampleRateDivisor, frequency, voltageDataSet.CN, currentDataSet.CN);
+
+            sampleIndex = cycleIndex * (voltageDataSet.AN.SampleRate / sampleRateDivisor); // start-of-cycle sample, taken from the AN voltage channel
+            StartTime = new DateTime(voltageDataSet.AN.Times[sampleIndex]);
+        }
+
+        #endregion
+
+        #region [ Methods ]
+
+        /// <summary>
+        /// Calculates the positive, negative, and zero sequence components
+        /// and returns them in an array with indexes 1, 2, and 0 respectively.
+        /// </summary>
+        /// <param name="anCycle">The cycle of A-to-neutral data to be used.</param>
+        /// <param name="bnCycle">The cycle of B-to-neutral data to be used.</param>
+        /// <param name="cnCycle">The cycle of C-to-neutral data to be used.</param>
+        /// <returns>An array of size 3 containing the zero sequence, positive sequence, and negative sequence components in that order.</returns>
+        public static ComplexNumber[] CalculateSequenceComponents(Cycle anCycle, Cycle bnCycle, Cycle cnCycle)
+        {
+            ComplexNumber an = anCycle.Complex;
+            ComplexNumber bn = bnCycle.Complex;
+            ComplexNumber cn = cnCycle.Complex;
+
+            ComplexNumber[] sequenceComponents = new ComplexNumber[3];
+
+            sequenceComponents[0] = (an + bn + cn) / 3.0D; // zero sequence
+            sequenceComponents[1] = (an + a * bn + aSq * cn) / 3.0D; // positive sequence
+            sequenceComponents[2] = (an + aSq * bn + a * cn) / 3.0D; // negative sequence
+
+            return sequenceComponents;
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
diff --git a/src/Libraries/FaultAlgorithms/CycleDataSet.cs b/src/Libraries/FaultAlgorithms/CycleDataSet.cs
new file mode 100644
index 00000000..34b810b7
--- /dev/null
+++ b/src/Libraries/FaultAlgorithms/CycleDataSet.cs
@@ -0,0 +1,303 @@
+//*********************************************************************************************************************
+// CycleDataSet.cs
+// Version 1.1 and subsequent releases
+//
+// Copyright © 2013, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Version 1.0
+//
+// Copyright 2012 ELECTRIC POWER RESEARCH INSTITUTE, INC. All rights reserved.
+//
+// openFLE ("this software") is licensed under BSD 3-Clause license.
+//
+// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+// following conditions are met:
+//
+// • Redistributions of source code must retain the above copyright notice, this list of conditions and
+// the following disclaimer.
+//
+// • Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
+// the following disclaimer in the documentation and/or other materials provided with the distribution.
+//
+// • Neither the name of the Electric Power Research Institute, Inc. (“EPRI”) nor the names of its contributors
+// may be used to endorse or promote products derived from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL EPRI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+//
+// This software incorporates work covered by the following copyright and permission notice:
+//
+// • TVA Code Library 4.0.4.3 - Tennessee Valley Authority, tvainfo@tva.gov
+// No copyright is claimed pursuant to 17 USC § 105. All Other Rights Reserved.
+//
+// Licensed under TVA Custom License based on NASA Open Source Agreement (TVA Custom NOSA);
+// you may not use TVA Code Library except in compliance with the TVA Custom NOSA. You may
+// obtain a copy of the TVA Custom NOSA at http://tvacodelibrary.codeplex.com/license.
+//
+// TVA Code Library is provided by the copyright holders and contributors "as is" and any express
+// or implied warranties, including, but not limited to, the implied warranties of merchantability
+// and fitness for a particular purpose are disclaimed.
+//
+//*********************************************************************************************************************
+//
+// Code Modification History:
+// -------------------------------------------------------------------------------------------------------------------
+// 06/14/2012 - Stephen C. Wills, Grid Protection Alliance
+// Generated original version of source code.
+//
+//*********************************************************************************************************************
+
+using System.Collections;
+using Gemstone.Numeric;
+using Gemstone.Numeric.Analysis;
+
+namespace FaultAlgorithms
+{
+    /// <summary>
+    /// Represents a collection of all the cycles extracted from a given data set.
+    /// </summary>
+    public class CycleDataSet : IEnumerable<CycleData>
+    {
+        #region [ Members ]
+
+        // Fields
+        private List<CycleData> m_cycles;
+
+        #endregion
+
+        #region [ Constructors ]
+
+        /// <summary>
+        /// Creates a new instance of the <see cref="CycleDataSet"/> class.
+        /// </summary>
+        public CycleDataSet()
+        {
+            m_cycles = new List<CycleData>();
+        }
+
+        /// <summary>
+        /// Creates a new instance of the <see cref="CycleDataSet"/> class.
+        /// </summary>
+        /// <param name="frequency">The frequency of the measured system, in Hz.</param>
+        /// <param name="voltageDataSet">The data set containing voltage data points.</param>
+        /// <param name="currentDataSet">The data set containing current data points.</param>
+        public CycleDataSet(double frequency, MeasurementDataSet voltageDataSet, MeasurementDataSet currentDataSet) : this()
+        {
+            Populate(frequency, voltageDataSet, currentDataSet); // chained ctor initializes m_cycles before Populate adds to it
+        }
+
+        #endregion
+
+        #region [ Properties ]
+
+        /// <summary>
+        /// Gets or sets the data structure containing a
+        /// full cycle of data at the given index.
+        /// </summary>
+        /// <param name="i">The index of the cycle.</param>
+        /// <returns>The cycle of data at the given index.</returns>
+        public CycleData this[int i]
+        {
+            get
+            {
+                return m_cycles[i];
+            }
+            set
+            {
+                while (i >= m_cycles.Count) // pad with null entries so assignment at any index succeeds
+                    m_cycles.Add(null);
+
+                m_cycles[i] = value;
+            }
+        }
+
+        /// <summary>
+        /// Gets the size of the cycle data set.
+        /// </summary>
+        public int Count
+        {
+            get
+            {
+                return m_cycles.Count;
+            }
+        }
+
+        #endregion
+
+        #region [ Methods ]
+
+        /// <summary>
+        /// Populates the cycle data set by calculating cycle
+        /// data based on the given measurement data sets.
+        /// </summary>
+        /// <param name="frequency">The frequency of the measured system, in Hz.</param>
+        /// <param name="voltageDataSet">Data set containing voltage waveform measurements.</param>
+        /// <param name="currentDataSet">Data set containing current waveform measurements.</param>
+        public void Populate(double frequency, MeasurementDataSet voltageDataSet, MeasurementDataSet currentDataSet)
+        {
+            List<MeasurementData> measurementDataList;
+            int sampleRateDivisor;
+            int numberOfCycles;
+
+            measurementDataList = new List<MeasurementData>()
+            {
+                voltageDataSet.AN, voltageDataSet.BN, voltageDataSet.CN,
+                currentDataSet.AN, currentDataSet.BN, currentDataSet.CN
+            };
+
+            sampleRateDivisor = measurementDataList // GCD of all six channel sample rates (Gemstone extension)
+                .Select(measurementData => measurementData.SampleRate)
+                .GreatestCommonDenominator();
+
+            numberOfCycles = measurementDataList // number of whole cycles available in every channel
+                .Select(measurementData => (measurementData.Measurements.Length - measurementData.SampleRate + 1) / (measurementData.SampleRate / sampleRateDivisor))
+                .Min();
+
+            for (int i = 0; i < numberOfCycles; i++)
+                m_cycles.Add(new CycleData(i, sampleRateDivisor, frequency, voltageDataSet, currentDataSet));
+        }
+
+        /// <summary>
+        /// Returns the index of the cycle with the largest total current.
+        /// </summary>
+        /// <returns>The index of the cycle with the largest total current, or -1 if the set is empty.</returns>
+        public int GetLargestCurrentIndex()
+        {
+            int index = 0;
+            int bestFaultIndex = -1;
+            double largestCurrent = 0.0D;
+
+            foreach (CycleData cycle in m_cycles)
+            {
+                double totalCurrent = cycle.AN.I.RMS + cycle.BN.I.RMS + cycle.CN.I.RMS;
+
+                if (totalCurrent > largestCurrent)
+                {
+                    bestFaultIndex = index;
+                    largestCurrent = totalCurrent;
+                }
+
+                index++;
+            }
+
+            return bestFaultIndex;
+        }
+
+        /// <summary>
+        /// Clears the cycle data set so that it can be repopulated.
+        /// </summary>
+        public void Clear()
+        {
+            m_cycles.Clear();
+        }
+
+        /// <summary>
+        /// Returns an enumerator that iterates through the collection of cycles.
+        /// </summary>
+        /// <returns>An object that can be used to iterate through the collection.</returns>
+        public IEnumerator<CycleData> GetEnumerator()
+        {
+            foreach (CycleData cycle in m_cycles)
+            {
+                yield return cycle;
+            }
+        }
+
+        /// <summary>
+        /// Returns an enumerator that iterates through the collection of cycles.
+        /// </summary>
+        /// <returns>An object that can be used to iterate through the collection.</returns>
+        IEnumerator IEnumerable.GetEnumerator()
+        {
+            return GetEnumerator();
+        }
+
+        #endregion
+
+        #region [ Static ]
+
+        // Static Methods
+
+        /// <summary>
+        /// Exports the given <see cref="CycleDataSet"/> to a CSV file.
+        /// </summary>
+        /// <param name="fileName">The name of the CSV file.</param>
+        /// <param name="cycles">The cycle data set to be exported.</param>
+        public static void ExportToCSV(string fileName, CycleDataSet cycles)
+        {
+            const string Header =
+                "AN V RMS,AN V Phase,AN V Peak," +
+                "BN V RMS,BN V Phase,BN V Peak," +
+                "CN V RMS,CN V Phase,CN V Peak," +
+                "Pos V Magnitude,Pos V Angle," +
+                "Neg V Magnitude,Neg V Angle," +
+                "Zero V Magnitude,Zero V Angle," +
+                "AN I RMS,AN I Phase,AN I Peak," +
+                "BN I RMS,BN I Phase,BN I Peak," +
+                "CN I RMS,CN I Phase,CN I Peak," +
+                "Pos I Magnitude,Pos I Angle," +
+                "Neg I Magnitude,Neg I Angle," +
+                "Zero I Magnitude,Zero I Angle";
+
+            using (FileStream fileStream = File.Create(fileName)) // Create truncates an existing file; OpenWrite would leave stale trailing bytes
+            {
+                using (TextWriter fileWriter = new StreamWriter(fileStream))
+                {
+                    // Write the CSV header to the file
+                    fileWriter.WriteLine(Header);
+
+                    // Write data to the file
+                    foreach (CycleData cycleData in cycles.m_cycles)
+                        fileWriter.WriteLine(ToCSV(cycleData));
+                }
+            }
+        }
+
+        // Converts the cycle data to a row of CSV data.
+        private static string ToCSV(CycleData cycleData)
+        {
+            ComplexNumber[] vSeq = CycleData.CalculateSequenceComponents(cycleData.AN.V, cycleData.BN.V, cycleData.CN.V);
+            ComplexNumber[] iSeq = CycleData.CalculateSequenceComponents(cycleData.AN.I, cycleData.BN.I, cycleData.CN.I);
+
+            string vCsv = string.Format("{0},{1},{2}", ToCSV(cycleData.AN.V), ToCSV(cycleData.BN.V), ToCSV(cycleData.CN.V));
+            string vSeqCsv = string.Format("{0},{1},{2}", ToCSV(vSeq[1]), ToCSV(vSeq[2]), ToCSV(vSeq[0]));
+            string iCsv = string.Format("{0},{1},{2}", ToCSV(cycleData.AN.I), ToCSV(cycleData.BN.I), ToCSV(cycleData.CN.I));
+            string iSeqCsv = string.Format("{0},{1},{2}", ToCSV(iSeq[1]), ToCSV(iSeq[2]), ToCSV(iSeq[0]));
+
+            return string.Format("{0},{1},{2},{3}", vCsv, vSeqCsv, iCsv, iSeqCsv);
+        }
+
+        // Converts the cycle to CSV data.
+        private static string ToCSV(Cycle cycle)
+        {
+            return string.Format("{0},{1},{2}", cycle.RMS, cycle.Phase.ToDegrees(), cycle.Peak);
+        }
+
+        // Converts the sequence component to CSV data.
+        private static string ToCSV(ComplexNumber sequenceComponent)
+        {
+            return string.Format("{0},{1}", sequenceComponent.Magnitude, sequenceComponent.Angle.ToDegrees());
+        }
+
+        #endregion
+    }
+}
diff --git a/src/Libraries/FaultAlgorithms/FaultAlgorithms.csproj b/src/Libraries/FaultAlgorithms/FaultAlgorithms.csproj
new file mode 100644
index 00000000..bc767802
--- /dev/null
+++ b/src/Libraries/FaultAlgorithms/FaultAlgorithms.csproj
@@ -0,0 +1,13 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net9.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <!-- NOTE(review): reference items were stripped during extraction (likely Gemstone.Numeric and related Gemstone packages); confirm against the repository -->
+  </ItemGroup>
+
+</Project>
diff --git a/src/Libraries/FaultAlgorithms/MeasurementData.cs b/src/Libraries/FaultAlgorithms/MeasurementData.cs
new file mode 100644
index 00000000..04da176a
--- /dev/null
+++ b/src/Libraries/FaultAlgorithms/MeasurementData.cs
@@ -0,0 +1,97 @@
+//*********************************************************************************************************************
+// MeasurementData.cs
+// Version 1.1 and subsequent releases
+//
+// Copyright 2013, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Version 1.0
+//
+// Copyright 2012 ELECTRIC POWER RESEARCH INSTITUTE, INC. All rights reserved.
+//
+// openFLE ("this software") is licensed under BSD 3-Clause license.
+//
+// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+// following conditions are met:
+//
+// Redistributions of source code must retain the above copyright notice, this list of conditions and
+// the following disclaimer.
+//
+// Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
+// the following disclaimer in the documentation and/or other materials provided with the distribution.
+//
+// Neither the name of the Electric Power Research Institute, Inc. (EPRI) nor the names of its contributors
+// may be used to endorse or promote products derived from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL EPRI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+//
+// This software incorporates work covered by the following copyright and permission notice:
+//
+// TVA Code Library 4.0.4.3 - Tennessee Valley Authority, tvainfo@tva.gov
+// No copyright is claimed pursuant to 17 USC 105. All Other Rights Reserved.
+//
+// Licensed under TVA Custom License based on NASA Open Source Agreement (TVA Custom NOSA);
+// you may not use TVA Code Library except in compliance with the TVA Custom NOSA. You may
+// obtain a copy of the TVA Custom NOSA at http://tvacodelibrary.codeplex.com/license.
+//
+// TVA Code Library is provided by the copyright holders and contributors "as is" and any express
+// or implied warranties, including, but not limited to, the implied warranties of merchantability
+// and fitness for a particular purpose are disclaimed.
+//
+//*********************************************************************************************************************
+//
+// Code Modification History:
+// -------------------------------------------------------------------------------------------------------------------
+// 05/23/2012 - J. Ritchie Carroll, Grid Protection Alliance
+// Generated original version of source code.
+//
+//*********************************************************************************************************************
+
namespace FaultAlgorithms
{
    /// <summary>
    /// Represents a set of single phase power time-domain data.
    /// </summary>
    public class MeasurementData
    {
        #region [ Members ]

        // Fields

        /// <summary>
        /// Array of times in ticks (100 nanosecond intervals).
        /// </summary>
        public long[] Times;

        /// <summary>
        /// Array of measured values, parallel to <see cref="Times"/>.
        /// </summary>
        public double[] Measurements;

        /// <summary>
        /// The number of measured samples per cycle of data.
        /// </summary>
        public int SampleRate;

        #endregion
    }
}
\ No newline at end of file
diff --git a/src/Libraries/FaultAlgorithms/MeasurementDataSet.cs b/src/Libraries/FaultAlgorithms/MeasurementDataSet.cs
new file mode 100644
index 00000000..1a6c0fb0
--- /dev/null
+++ b/src/Libraries/FaultAlgorithms/MeasurementDataSet.cs
@@ -0,0 +1,272 @@
+//*********************************************************************************************************************
+// MeasurementDataSet.cs
+// Version 1.1 and subsequent releases
+//
+// Copyright 2013, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Version 1.0
+//
+// Copyright 2012 ELECTRIC POWER RESEARCH INSTITUTE, INC. All rights reserved.
+//
+// openFLE ("this software") is licensed under BSD 3-Clause license.
+//
+// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+// following conditions are met:
+//
+// Redistributions of source code must retain the above copyright notice, this list of conditions and
+// the following disclaimer.
+//
+// Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
+// the following disclaimer in the documentation and/or other materials provided with the distribution.
+//
+// Neither the name of the Electric Power Research Institute, Inc. (EPRI) nor the names of its contributors
+// may be used to endorse or promote products derived from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL EPRI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+//
+// This software incorporates work covered by the following copyright and permission notice:
+//
+// TVA Code Library 4.0.4.3 - Tennessee Valley Authority, tvainfo@tva.gov
+// No copyright is claimed pursuant to 17 USC 105. All Other Rights Reserved.
+//
+// Licensed under TVA Custom License based on NASA Open Source Agreement (TVA Custom NOSA);
+// you may not use TVA Code Library except in compliance with the TVA Custom NOSA. You may
+// obtain a copy of the TVA Custom NOSA at http://tvacodelibrary.codeplex.com/license.
+//
+// TVA Code Library is provided by the copyright holders and contributors "as is" and any express
+// or implied warranties, including, but not limited to, the implied warranties of merchantability
+// and fitness for a particular purpose are disclaimed.
+//
+//*********************************************************************************************************************
+//
+// Code Modification History:
+// -------------------------------------------------------------------------------------------------------------------
+// 05/23/2012 - J. Ritchie Carroll, Grid Protection Alliance
+// Generated original version of source code.
+//
+//*********************************************************************************************************************
+
+using Gemstone;
+
namespace FaultAlgorithms
{
    /// <summary>
    /// Represents a set of 3-phase line-to-neutral and line-to-line time-domain power data.
    /// </summary>
    public class MeasurementDataSet
    {
        #region [ Members ]

        // Constants

        // Timestamp format used for all CSV exports (microsecond resolution)
        private const string DateTimeFormat = "yyyy-MM-dd HH:mm:ss.ffffff";

        // Fields

        /// <summary>
        /// Line-to-neutral A-phase data.
        /// </summary>
        public MeasurementData AN;

        /// <summary>
        /// Line-to-neutral B-phase data.
        /// </summary>
        public MeasurementData BN;

        /// <summary>
        /// Line-to-neutral C-phase data.
        /// </summary>
        public MeasurementData CN;

        #endregion

        #region [ Constructors ]

        /// <summary>
        /// Creates a new <see cref="MeasurementDataSet"/> with empty phase data.
        /// </summary>
        public MeasurementDataSet()
        {
            AN = new MeasurementData();
            BN = new MeasurementData();
            CN = new MeasurementData();
        }

        #endregion

        #region [ Methods ]

        /// <summary>
        /// Uses system frequency to calculate the sample rate for each set
        /// of <see cref="MeasurementData"/> in this measurement data set.
        /// </summary>
        /// <param name="frequency">The frequency of the measured system, in Hz.</param>
        public void CalculateSampleRates(double frequency)
        {
            CalculateSampleRate(frequency, AN);
            CalculateSampleRate(frequency, BN);
            CalculateSampleRate(frequency, CN);
        }

        /// <summary>
        /// Explicitly sets the sample rate for each set of
        /// <see cref="MeasurementData"/> in this measurement data set.
        /// </summary>
        /// <param name="sampleRate">The sample rate, in samples per cycle.</param>
        public void SetSampleRate(int sampleRate)
        {
            AN.SampleRate = sampleRate;
            BN.SampleRate = sampleRate;
            CN.SampleRate = sampleRate;
        }

        /// <summary>
        /// Writes all voltage measurement data to a CSV file.
        /// </summary>
        /// <param name="fileName">Export file name.</param>
        public void ExportVoltageDataToCSV(string fileName)
        {
            const string Header = "Time,AN,BN,CN,AB,BC,CA";

            // File.Create truncates any existing file; File.OpenWrite would
            // leave stale trailing bytes when overwriting a longer file
            using (FileStream fileStream = File.Create(fileName))
            {
                using (TextWriter fileWriter = new StreamWriter(fileStream))
                {
                    // Write the CSV header to the file
                    fileWriter.WriteLine(Header);

                    // Write the data to the file; line-to-line values are derived
                    // from the differences between line-to-neutral values
                    for (int i = 0; i < AN.Times.Length; i++)
                    {
                        string time = new DateTime(AN.Times[i]).ToString(DateTimeFormat);

                        double an = AN.Measurements[i];
                        double bn = BN.Measurements[i];
                        double cn = CN.Measurements[i];

                        fileWriter.Write("{0},{1},{2},{3},", time, an, bn, cn);
                        fileWriter.WriteLine("{0},{1},{2}", an - bn, bn - cn, cn - an);
                    }
                }
            }
        }

        /// <summary>
        /// Writes all current measurement data to a CSV file.
        /// </summary>
        /// <param name="fileName">Export file name.</param>
        public void ExportCurrentDataToCSV(string fileName)
        {
            const string Header = "Time,AN,BN,CN";

            // File.Create truncates any existing file; File.OpenWrite would
            // leave stale trailing bytes when overwriting a longer file
            using (FileStream fileStream = File.Create(fileName))
            {
                using (TextWriter fileWriter = new StreamWriter(fileStream))
                {
                    // Write the CSV header to the file
                    fileWriter.WriteLine(Header);

                    // Write the data to the file
                    for (int i = 0; i < AN.Times.Length; i++)
                    {
                        string time = new DateTime(AN.Times[i]).ToString(DateTimeFormat);

                        double an = AN.Measurements[i];
                        double bn = BN.Measurements[i];
                        double cn = CN.Measurements[i];

                        fileWriter.WriteLine("{0},{1},{2},{3}", time, an, bn, cn);
                    }
                }
            }
        }

        // Calculates and stores the sample rate (samples per cycle) of the given
        // measurement data from the time span it covers and the system frequency.
        private void CalculateSampleRate(double frequency, MeasurementData measurementData)
        {
            long[] times;
            long startTicks;
            long endTicks;
            double cycles;

            // Get the collection of measurement timestamps
            times = measurementData.Times;

            // Determine the start and end time of the data set
            startTicks = times[0];
            endTicks = times[times.Length - 1];

            // Determine the number of cycles in the file,
            // based on the system frequency
            cycles = frequency * Ticks.ToSeconds(endTicks - startTicks);

            // Calculate the number of samples per cycle
            // NOTE(review): assumes at least two samples spanning a nonzero time
            // interval (cycles > 0) — confirm callers guarantee this
            measurementData.SampleRate = (int)Math.Round(times.Length / cycles);
        }

        #endregion

        #region [ Static ]

        // Static Methods

        /// <summary>
        /// Writes all measurement data to a CSV file.
        /// </summary>
        /// <param name="fileName">Export file name.</param>
        /// <param name="voltageData">The voltage measurement data to be written to the file.</param>
        /// <param name="currentData">The current measurement data to be written to the file.</param>
        public static void ExportToCSV(string fileName, MeasurementDataSet voltageData, MeasurementDataSet currentData)
        {
            const string Header = "Time,AN V,BN V,CN V,AB V,BC V,CA V,AN I,BN I,CN I";

            using (FileStream fileStream = File.Create(fileName))
            {
                using (TextWriter fileWriter = new StreamWriter(fileStream))
                {
                    // Write the CSV header to the file
                    fileWriter.WriteLine(Header);

                    // Write the data to the file; both data sets are assumed
                    // to be sampled on the same timestamps as voltageData.AN
                    for (int i = 0; i < voltageData.AN.Times.Length; i++)
                    {
                        string time = new DateTime(voltageData.AN.Times[i]).ToString(DateTimeFormat);

                        double vAN = voltageData.AN.Measurements[i];
                        double vBN = voltageData.BN.Measurements[i];
                        double vCN = voltageData.CN.Measurements[i];

                        double iAN = currentData.AN.Measurements[i];
                        double iBN = currentData.BN.Measurements[i];
                        double iCN = currentData.CN.Measurements[i];

                        fileWriter.Write("{0},{1},{2},{3},", time, vAN, vBN, vCN);
                        fileWriter.Write("{0},{1},{2},", vAN - vBN, vBN - vCN, vCN - vAN);
                        fileWriter.WriteLine("{0},{1},{2}", iAN, iBN, iCN);
                    }
                }
            }
        }

        #endregion
    }
}
\ No newline at end of file
diff --git a/src/Libraries/FaultData/DataAnalysis/CycleDataGroup.cs b/src/Libraries/FaultData/DataAnalysis/CycleDataGroup.cs
new file mode 100644
index 00000000..3129dd76
--- /dev/null
+++ b/src/Libraries/FaultData/DataAnalysis/CycleDataGroup.cs
@@ -0,0 +1,115 @@
+//******************************************************************************************************
+// CycleDataGroup.cs - Gbtc
+//
+// Copyright © 2014, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2014 - Stephen C. Wills
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
using openXDA.Model;

namespace FaultData.DataAnalysis
{
    /// <summary>
    /// Wraps a <see cref="DataGroup"/> whose four series hold, per cycle,
    /// the RMS, phase, peak, and error values, exposing each by name.
    /// </summary>
    public class CycleDataGroup
    {
        #region [ Members ]

        // Constants

        // Fixed positions of each cycle series within the wrapped data group
        private const int RMSIndex = 0;
        private const int PhaseIndex = 1;
        private const int PeakIndex = 2;
        private const int ErrorIndex = 3;

        // Fields
        private readonly DataGroup m_dataGroup;
        private readonly Asset m_asset;

        #endregion

        #region [ Constructors ]

        /// <summary>
        /// Creates a new <see cref="CycleDataGroup"/> wrapping the given data group.
        /// </summary>
        public CycleDataGroup(DataGroup dataGroup, Asset asset)
        {
            m_dataGroup = dataGroup;
            m_asset = asset;
        }

        #endregion

        #region [ Properties ]

        /// <summary>Gets the RMS series of the cycle data.</summary>
        public DataSeries RMS => m_dataGroup[RMSIndex];

        /// <summary>Gets the phase series of the cycle data.</summary>
        public DataSeries Phase => m_dataGroup[PhaseIndex];

        /// <summary>Gets the peak series of the cycle data.</summary>
        public DataSeries Peak => m_dataGroup[PeakIndex];

        /// <summary>Gets the error series of the cycle data.</summary>
        public DataSeries Error => m_dataGroup[ErrorIndex];

        /// <summary>Gets the asset associated with this cycle data.</summary>
        public Asset Asset => m_asset;

        #endregion

        #region [ Methods ]

        /// <summary>Returns the underlying data group.</summary>
        public DataGroup ToDataGroup() => m_dataGroup;

        /// <summary>Creates a cycle data group over the given sample index range.</summary>
        public CycleDataGroup ToSubGroup(int startIndex, int endIndex) =>
            new CycleDataGroup(m_dataGroup.ToSubGroup(startIndex, endIndex), m_asset);

        /// <summary>Creates a cycle data group over the given time range.</summary>
        public CycleDataGroup ToSubGroup(DateTime startTime, DateTime endTime) =>
            new CycleDataGroup(m_dataGroup.ToSubGroup(startTime, endTime), m_asset);

        #endregion
    }
}
diff --git a/src/Libraries/FaultData/DataAnalysis/DataGroup.cs b/src/Libraries/FaultData/DataAnalysis/DataGroup.cs
new file mode 100644
index 00000000..20d91b75
--- /dev/null
+++ b/src/Libraries/FaultData/DataAnalysis/DataGroup.cs
@@ -0,0 +1,662 @@
+//******************************************************************************************************
+// DataGroup.cs - Gbtc
+//
+// Copyright © 2014, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 05/19/2014 - Stephen C. Wills
+// Generated original version of source code.
+// 12/23/2019 - C. Lackner
+// Adjusted to read data from blob for each dataseries.
+//
+//******************************************************************************************************
+
+using System.Data;
+using Gemstone;
+using Gemstone.Data.Model;
+using Ionic.Zlib;
+using Microsoft.Data.SqlClient;
+using openXDA.Model;
+
+namespace FaultData.DataAnalysis
+{
    // Classification assigned to a DataGroup by DataGroup.Classify().
    public enum DataClassification
    {
        // Slow, steady-state trending data: sampled at or below
        // DataGroup.TrendThreshold samples per minute, with no disturbances.
        Trend,
        // Event (waveform/disturbance) data: has reported disturbances or
        // instantaneous non-digital series (see DataGroup.IsEvent).
        Event,
        // High-resolution RMS data (see DataGroup.IsFastRMS).
        FastRMS,
        // Classification has not yet been determined.
        Unknown
    }
+
+ public class DataGroup
+ {
+ #region [ Members ]
+
+ // Constants
+
+ ///
+ /// Maximum sample rate, in samples per minute, of data classified as .
+ ///
+ public const double TrendThreshold = 1.0D;
+
+ // Fields
+ private Asset m_asset;
+ private DateTime m_startTime;
+ private DateTime m_endTime;
+ private int m_samples;
+
+ private List m_dataSeries;
+ private List m_disturbances;
+ private DataClassification m_classification;
+
+ #endregion
+
+ #region [ Constructors ]
+
+ ///
+ /// Creates a new instance of the class.
+ ///
+ public DataGroup()
+ {
+ m_dataSeries = new List();
+ m_disturbances = new List();
+ m_classification = DataClassification.Unknown;
+ m_asset = null;
+ }
+
+ ///
+ /// Creates a new instance of the class.
+ ///
+ /// Asset associated with this datagroup
+ public DataGroup(Asset asset)
+ {
+ m_dataSeries = new List();
+ m_disturbances = new List();
+ m_classification = DataClassification.Unknown;
+ m_asset = asset;
+ }
+
+ ///
+ /// Creates a new instance of the class.
+ ///
+ /// Collection of data series to be added to the data group.
+ public DataGroup(IEnumerable dataSeries)
+ : this()
+ {
+ foreach (DataSeries series in dataSeries)
+ Add(series);
+ }
+
+ ///
+ /// Creates a new instance of the class.
+ ///
+ /// Collection of data series to be added to the data group.
+ /// Asset associated with this datagroup
+ public DataGroup(IEnumerable dataSeries, Asset asset)
+ : this(asset)
+ {
+ foreach (DataSeries series in dataSeries)
+ Add(series);
+ }
+
+ #endregion
+
+ #region [ Properties ]
+
+ ///
+ /// Gets the line from which measurements were taken to create the group of data.
+ ///
+ public Asset Asset
+ {
+ get
+ {
+ return m_asset;
+ }
+ }
+
+ ///
+ /// Gets the start time of the group of data.
+ ///
+ public DateTime StartTime
+ {
+ get
+ {
+ return m_startTime;
+ }
+ }
+
+ ///
+ /// Gets the end time of the group of data.
+ ///
+ public DateTime EndTime
+ {
+ get
+ {
+ return m_endTime;
+ }
+ }
+
+ ///
+ /// Gets the number of samples in each series.
+ ///
+ public int Samples
+ {
+ get
+ {
+ return m_samples;
+ }
+ }
+
+ ///
+ /// Gets the sample rate, in samples per second,
+ /// of the data series in this data group.
+ ///
+ public double SamplesPerSecond
+ {
+ get
+ {
+ if (!m_dataSeries.Any())
+ return double.NaN;
+
+ return m_dataSeries[0].SampleRate;
+ }
+ }
+
+ ///
+ /// Gets the duration, in seconds,
+ /// of the data series in this data group.
+ ///
+ public double Duration
+ {
+ get
+ {
+ if (!m_dataSeries.Any())
+ return double.NaN;
+
+ return m_dataSeries[0].Duration;
+ }
+ }
+
+ ///
+ /// Gets the sample rate, in samples per hour,
+ /// of the data series in this data group.
+ ///
+ public double SamplesPerHour
+ {
+ get
+ {
+ return (m_samples - 1) / (m_endTime - m_startTime).TotalHours;
+ }
+ }
+
+ ///
+ /// Gets flag that indicates whether the data series
+ /// in this data group are marked as trend channels.
+ ///
+ public bool Trend =>
+ m_dataSeries.Any(dataSeries => dataSeries.SeriesInfo?.Channel.Trend == true);
+
+ ///
+ /// Gets the channels contained in this data group.
+ ///
+ public IReadOnlyList DataSeries
+ {
+ get
+ {
+ return m_dataSeries.AsReadOnly();
+ }
+ }
+
+ ///
+ /// Gets the disturbances contained in this data group.
+ ///
+ public IReadOnlyList Disturbances
+ {
+ get
+ {
+ return m_disturbances.AsReadOnly();
+ }
+ }
+
+ ///
+ /// Gets the classification of this group of data as of the last call to .
+ ///
+ public DataClassification Classification
+ {
+ get
+ {
+ if (m_classification == DataClassification.Unknown)
+ Classify();
+
+ return m_classification;
+ }
+ }
+
+ public DataSeries this[int index]
+ {
+ get
+ {
+ return m_dataSeries[index];
+ }
+ }
+
+ #endregion
+
+ #region [ Methods ]
+
+ ///
+ /// Adds a channel to the group of data.
+ ///
+ /// The channel to be added to the group.
+ ///
+ /// True if the channel was successfully added. False if the channel was excluded
+ /// because the channel does not match the other channels already in the data group.
+ ///
+ public bool Add(DataSeries dataSeries)
+ {
+ Asset asset;
+ DateTime startTime;
+ DateTime endTime;
+ int samples;
+ bool trend;
+
+ // Unable to add null data series
+ if ((object)dataSeries == null)
+ return false;
+
+ // Data series without data is irrelevant to data grouping
+ if (!dataSeries.DataPoints.Any())
+ return false;
+
+ // Do not add the same data series twice
+ if (m_dataSeries.Contains(dataSeries))
+ return false;
+
+ // Get information about the line this data is associated with
+ if ((object)dataSeries.SeriesInfo != null)
+ asset = dataSeries.SeriesInfo.Channel.Asset;
+ else
+ asset = null;
+
+ // Get the start time, end time, number of samples, and
+ // trend flag for the data series passed into this function
+ startTime = dataSeries.DataPoints[0].Time;
+ endTime = dataSeries.DataPoints[dataSeries.DataPoints.Count - 1].Time;
+ samples = dataSeries.DataPoints.Count;
+ trend = dataSeries.SeriesInfo?.Channel.Trend == true;
+
+ // If there are any disturbances in this data group that do not overlap
+ // with the data series, do not include the data series in the data group
+ if (m_disturbances.Select(disturbance => disturbance.ToRange()).Any(range => range.Start > endTime || range.End < startTime))
+ return false;
+
+ // If there are any disturbances associated with the data in this group and the data
+ // to be added is trending data, do not include the trending data in the data group
+ if (m_disturbances.Any() && CalculateSamplesPerMinute(startTime, endTime, samples) <= TrendThreshold)
+ return false;
+
+ // At this point, if there is no existing data in the data
+ // group, add the data as the first series in the data group
+ if (m_dataSeries.Count == 0)
+ {
+ if (m_asset == null)
+ {
+ m_asset = asset;
+ }
+ m_startTime = startTime;
+ m_endTime = endTime;
+ m_samples = samples;
+
+ m_dataSeries.Add(dataSeries);
+ m_classification = DataClassification.Unknown;
+
+ return true;
+ }
+
+ // If the data being added matches the parameters for this data group, add the data to the data group
+ // Note that it does not have to match Asset
+ if (startTime == m_startTime && endTime == m_endTime && samples == m_samples && trend == Trend)
+ {
+ m_dataSeries.Add(dataSeries);
+ return true;
+ }
+
+ return false;
+ }
+
+ ///
+ /// Adds a disturbance to the group of data.
+ ///
+ /// The disturbance to be added to the group.
+ /// True if the disturbance was successfully added.
+ public bool Add(ReportedDisturbance disturbance)
+ {
+ // Unable to add null disturbance
+ if ((object)disturbance == null)
+ return false;
+
+ // Do not add the same disturbance twice
+ if (m_disturbances.Contains(disturbance))
+ return false;
+
+ // If the data in this data group is trending data,
+ // do not add the disturbance to the data group
+ if (Classification == DataClassification.Trend)
+ return false;
+
+ // Get the start time and end time of the disturbance.
+ DateTime startTime = disturbance.Time;
+ DateTime endTime = startTime + disturbance.Duration;
+
+ // If there are no data series and no other disturbances,
+ // make this the first piece of data to be added to the data group
+ if (!m_dataSeries.Any() && !m_disturbances.Any())
+ {
+ m_startTime = startTime;
+ m_endTime = endTime;
+ m_disturbances.Add(disturbance);
+ m_classification = DataClassification.Event;
+ return true;
+ }
+
+ // If the disturbance overlaps with
+ // this data group, add the disturbance
+ if (startTime <= m_endTime && m_startTime <= endTime)
+ {
+ // If the only data in the data group is disturbances,
+ // adjust the start time and end time
+ if (!m_dataSeries.Any() && startTime < m_startTime)
+ m_startTime = startTime;
+
+ if (!m_dataSeries.Any() && endTime > m_endTime)
+ m_endTime = endTime;
+
+ m_disturbances.Add(disturbance);
+ return true;
+ }
+
+ return false;
+ }
+
+ ///
+ /// Removes a channel from the data group.
+ ///
+ /// The channel to be removed from the data group.
+ /// True if the channel existed in the group and was removed; false otherwise.
+ public bool Remove(DataSeries dataSeries)
+ {
+ if (m_dataSeries.Remove(dataSeries))
+ {
+ m_classification = m_disturbances.Any()
+ ? DataClassification.Event
+ : DataClassification.Unknown;
+
+ return true;
+ }
+
+ return false;
+ }
+
+ ///
+ /// Removes a disturbance from the data group.
+ ///
+ /// THe disturbance to be removed from the data group.
+ /// True if the disturbance existed in the group and was removed; false otherwise.
+ public bool Remove(ReportedDisturbance disturbance)
+ {
+ if (m_disturbances.Remove(disturbance))
+ {
+ if (!m_disturbances.Any())
+ m_classification = DataClassification.Unknown;
+
+ return true;
+ }
+
+ return false;
+ }
+
+ public DataGroup ToSubGroup(int startIndex, int endIndex)
+ {
+ DataGroup subGroup = new DataGroup();
+
+ foreach (DataSeries dataSeries in m_dataSeries)
+ subGroup.Add(dataSeries.ToSubSeries(startIndex, endIndex));
+
+ return subGroup;
+ }
+
+ public DataGroup ToSubGroup(DateTime startTime, DateTime endTime)
+ {
+ DataGroup subGroup = new DataGroup();
+
+ foreach (DataSeries dataSeries in m_dataSeries)
+ subGroup.Add(dataSeries.ToSubSeries(startTime, endTime));
+
+ return subGroup;
+ }
+
+ // Overwrite To Data to save Data into ChannelBlob instead of File Blob
+ // This needs to be done to avoid data duplication
+ public Dictionary ToData()
+ {
+ Dictionary result = new Dictionary();
+
+ var timeSeries = m_dataSeries[0].DataPoints
+ .Select(dataPoint => new { Time = dataPoint.Time.Ticks, Compressed = false })
+ .ToList();
+
+ for (int i = 1; i < timeSeries.Count; i++)
+ {
+ long previousTimestamp = m_dataSeries[0][i - 1].Time.Ticks;
+ long timestamp = timeSeries[i].Time;
+ long diff = timestamp - previousTimestamp;
+
+ if (diff >= 0 && diff <= ushort.MaxValue)
+ timeSeries[i] = new { Time = diff, Compressed = true };
+
+
+ }
+
+ int timeSeriesByteLength = timeSeries.Sum(obj => obj.Compressed ? sizeof(ushort) : sizeof(int) + sizeof(long));
+ int dataSeriesByteLength = sizeof(int) + (2 * sizeof(double)) + (m_samples * sizeof(ushort));
+ int totalByteLength = sizeof(int) + timeSeriesByteLength + dataSeriesByteLength;
+
+ foreach (DataSeries dataSeries in m_dataSeries)
+ {
+ byte[] data = new byte[totalByteLength];
+ int offset = 0;
+
+ offset += LittleEndian.CopyBytes(m_samples, data, offset);
+
+ List uncompressedIndexes = timeSeries
+ .Select((obj, Index) => new { obj.Compressed, Index })
+ .Where(obj => !obj.Compressed)
+ .Select(obj => obj.Index)
+ .ToList();
+
+ for (int i = 0; i < uncompressedIndexes.Count; i++)
+ {
+ int index = uncompressedIndexes[i];
+ int nextIndex = (i + 1 < uncompressedIndexes.Count) ? uncompressedIndexes[i + 1] : timeSeries.Count;
+
+ offset += LittleEndian.CopyBytes(nextIndex - index, data, offset);
+ offset += LittleEndian.CopyBytes(timeSeries[index].Time, data, offset);
+
+ for (int j = index + 1; j < nextIndex; j++)
+ offset += LittleEndian.CopyBytes((ushort)timeSeries[j].Time, data, offset);
+ }
+
+
+ if (dataSeries.Calculated) continue;
+
+ const ushort NaNValue = ushort.MaxValue;
+ const ushort MaxCompressedValue = ushort.MaxValue - 1;
+ int seriesID = dataSeries.SeriesInfo?.ID ?? 0;
+ double range = dataSeries.Maximum - dataSeries.Minimum;
+ double decompressionOffset = dataSeries.Minimum;
+ double decompressionScale = range / MaxCompressedValue;
+ double compressionScale = (decompressionScale != 0.0D) ? 1.0D / decompressionScale : 0.0D;
+
+ offset += LittleEndian.CopyBytes(seriesID, data, offset);
+ offset += LittleEndian.CopyBytes(decompressionOffset, data, offset);
+ offset += LittleEndian.CopyBytes(decompressionScale, data, offset);
+
+ foreach (DataPoint dataPoint in dataSeries.DataPoints)
+ {
+ ushort compressedValue = (ushort)Math.Round((dataPoint.Value - decompressionOffset) * compressionScale);
+
+ if (compressedValue == NaNValue)
+ compressedValue--;
+
+ if (double.IsNaN(dataPoint.Value))
+ compressedValue = NaNValue;
+
+ offset += LittleEndian.CopyBytes(compressedValue, data, offset);
+ }
+ byte[] returnArray = GZipStream.CompressBuffer(data);
+ returnArray[0] = 0x44;
+ returnArray[1] = 0x33;
+
+ int dataSeriesID = dataSeries.SeriesInfo?.ID ?? 0;
+ result.Add(dataSeriesID, returnArray);
+ }
+
+ return result ;
+ }
+
+ public void FromData(List data)
+ {
+ FromData(null, data);
+ }
+
+ public void FromData(Meter meter, List dataList)
+ {
+ var decompressed = dataList.SelectMany(d => ChannelData.Decompress(d));
+
+ foreach (Tuple> tuple in decompressed)
+ {
+ DataSeries dataSeries = new DataSeries();
+
+ if (tuple.Item1 > 0 && !(meter is null))
+ dataSeries.SeriesInfo = meter.Series.FirstOrDefault(s => s.ID == tuple.Item1);
+
+ dataSeries.DataPoints = tuple.Item2;
+
+ Add(dataSeries);
+ }
+ }
+
+ private void Classify()
+ {
+ if (IsTrend())
+ m_classification = DataClassification.Trend;
+ else if (IsEvent())
+ m_classification = DataClassification.Event;
+ else if (IsFastRMS())
+ m_classification = DataClassification.FastRMS;
+ else
+ m_classification = DataClassification.Unknown;
+ }
+
+ private bool IsTrend()
+ {
+ if (!m_dataSeries.Any() || m_disturbances.Any())
+ return false;
+
+ double samplesPerMinute = CalculateSamplesPerMinute(m_startTime, m_endTime, m_samples);
+ return samplesPerMinute <= TrendThreshold;
+ }
+
+ private bool IsEvent()
+ {
+ if (m_disturbances.Any())
+ return true;
+
+ return m_dataSeries
+ .Where(dataSeries => (object)dataSeries.SeriesInfo != null)
+ .Where(IsInstantaneous)
+ .Where(dataSeries => dataSeries.SeriesInfo.Channel.MeasurementType.Name != "Digital")
+ .Any();
+ }
+
+ private bool IsInstantaneous(DataSeries dataSeries)
+ {
+ string characteristicName = dataSeries.SeriesInfo.Channel.MeasurementCharacteristic.Name;
+ string seriesTypeName = dataSeries.SeriesInfo.SeriesType.Name;
+
+ return (characteristicName == "Instantaneous") &&
+ (seriesTypeName == "Values" || seriesTypeName == "Instantaneous");
+ }
+
+ private bool IsFastRMS()
+ {
+ return m_dataSeries
+ .Where(dataSeries => (object)dataSeries.SeriesInfo != null)
+ .Where(IsRMS)
+ .Any();
+ }
+
+ private bool IsRMS(DataSeries dataSeries)
+ {
+ string characteristicName = dataSeries.SeriesInfo.Channel.MeasurementCharacteristic.Name;
+ string seriesTypeName = dataSeries.SeriesInfo.SeriesType.Name;
+
+ return (characteristicName == "RMS") &&
+ (seriesTypeName == "Values" || seriesTypeName == "Instantaneous");
+ }
+
+ private double CalculateSamplesPerMinute(DateTime startTime, DateTime endTime, int samples)
+ {
+ return (samples - 1) / (endTime - startTime).TotalMinutes;
+ }
+
+ #endregion
+ }
+
+ public static partial class TableOperationsExtensions
+ {
+ public static Event GetEvent(this TableOperations eventTable, FileGroup fileGroup, DataGroup dataGroup)
+ {
+ int fileGroupID = fileGroup.ID;
+ int assetID = dataGroup.Asset.ID;
+ DateTime startTime = dataGroup.StartTime;
+ DateTime endTime = dataGroup.EndTime;
+ int samples = dataGroup.Samples;
+
+ IDbDataParameter startTimeParameter = new SqlParameter()
+ {
+ ParameterName = nameof(dataGroup.StartTime),
+ DbType = DbType.DateTime2,
+ Value = startTime
+ };
+
+ IDbDataParameter endTimeParameter = new SqlParameter()
+ {
+ ParameterName = nameof(dataGroup.EndTime),
+ DbType = DbType.DateTime2,
+ Value = endTime
+ };
+
+ RecordRestriction recordRestriction =
+ new RecordRestriction("FileGroupID = {0}", fileGroupID) &
+ new RecordRestriction("AssetID = {0}", assetID) &
+ new RecordRestriction("StartTime = {0}", startTimeParameter) &
+ new RecordRestriction("EndTime = {0}", endTimeParameter) &
+ new RecordRestriction("Samples = {0}", samples);
+
+ return eventTable.QueryRecord(recordRestriction);
+ }
+ }
+}
diff --git a/src/Libraries/FaultData/DataAnalysis/DataSeries.cs b/src/Libraries/FaultData/DataAnalysis/DataSeries.cs
new file mode 100644
index 00000000..cbc7d9d1
--- /dev/null
+++ b/src/Libraries/FaultData/DataAnalysis/DataSeries.cs
@@ -0,0 +1,588 @@
+//******************************************************************************************************
+// DataSeries.cs - Gbtc
+//
+// Copyright © 2014, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 05/15/2014 - Stephen C. Wills
+// Generated original version of source code.
+// 07/09/2019 - Christoph Lackner
+// Added length property and Threshhold method.
+//
+//******************************************************************************************************
+
+using Gemstone;
+using Gemstone.Numeric.Interpolation;
+using Ionic.Zlib;
+using openXDA.Model;
+
+namespace FaultData.DataAnalysis
+{
+ ///
+ /// Represents a series of data points.
+ ///
+ public class DataSeries
+ {
+ #region [ Members ]
+
+ // Fields
+ private Series m_seriesInfo;
+ private List m_dataPoints;
+
+ private double? m_duration;
+ private double? m_sampleRate;
+ private double? m_minimum;
+ private double? m_maximum;
+ private double? m_average;
+
+ #endregion
+
+ #region [ Constructors ]
+
+ public DataSeries()
+ {
+ m_dataPoints = new List();
+ }
+
+ #endregion
+
+ #region [ Properties ]
+
+ ///
+ /// Gets or sets the configuration information
+ /// that defines the data in this series.
+ ///
+ public Series SeriesInfo
+ {
+ get
+ {
+ return m_seriesInfo;
+ }
+ set
+ {
+ m_seriesInfo = value;
+ }
+ }
+
+ ///
+ /// Gets or sets the data points that make up the series.
+ ///
+ public List DataPoints
+ {
+ get
+ {
+ return m_dataPoints;
+ }
+ set
+ {
+ m_dataPoints = value ?? new List();
+ m_duration = null;
+ m_sampleRate = null;
+ m_minimum = null;
+ m_maximum = null;
+ m_average = null;
+ }
+ }
+
+ ///
+ /// Gets the duration of the series, in seconds.
+ ///
+ public double Duration
+ {
+ get
+ {
+ if (m_duration.HasValue)
+ return m_duration.Value;
+
+ if (!m_dataPoints.Any())
+ return double.NaN;
+
+ m_duration = m_dataPoints.Last().Time.Subtract(m_dataPoints.First().Time).TotalSeconds;
+
+ return m_duration.Value;
+ }
+ }
+
+ ///
+ /// Gets the Start Time of the dataseries.
+ ///
+ public DateTime StartTime
+ {
+ get
+ {
+ if (!m_dataPoints.Any())
+ return DateTime.MinValue;
+ return m_dataPoints.First().Time;
+ }
+ }
+
+ ///
+ /// Gets the End Time of the dataseries.
+ ///
+ public DateTime EndTime
+ {
+ get
+ {
+ if (!m_dataPoints.Any())
+ return DateTime.MinValue;
+ return m_dataPoints.Last().Time;
+ }
+ }
+
+
+ ///
+ /// Gets the Length of the series, in datapoints.
+ ///
+ public int Length
+ {
+ get
+ {
+
+ if (!m_dataPoints.Any())
+ return 0;
+
+ return m_dataPoints.Count;
+ }
+ }
+
+ ///
+ /// Gets the sample rate of the series, in samples per second.
+ ///
+ public double SampleRate
+ {
+ get
+ {
+ if (m_sampleRate.HasValue)
+ return m_sampleRate.Value;
+
+ if (!m_dataPoints.Any())
+ return double.NaN;
+
+ int index = (m_dataPoints.Count > 128) ? 128 : m_dataPoints.Count - 1;
+
+ m_sampleRate = (Duration != 0.0D)
+ ? index / (m_dataPoints[index].Time - m_dataPoints[0].Time).TotalSeconds
+ : double.NaN;
+
+ return m_sampleRate.Value;
+ }
+ }
+
+ ///
+ /// Gets the maximum value in the series.
+ ///
+ public double Maximum
+ {
+ get
+ {
+ if (m_maximum.HasValue)
+ return m_maximum.Value;
+
+ if (!m_dataPoints.Any(dataPoint => !double.IsNaN(dataPoint.Value)))
+ return double.NaN;
+
+ m_maximum = m_dataPoints
+ .Select(point => point.Value)
+ .Where(value => !double.IsNaN(value))
+ .Max();
+
+ return m_maximum.Value;
+ }
+ }
+
+ ///
+ /// Gets the minimum value in the series.
+ ///
+ public double Minimum
+ {
+ get
+ {
+ if (m_minimum.HasValue)
+ return m_minimum.Value;
+
+ if (!m_dataPoints.Any(dataPoint => !double.IsNaN(dataPoint.Value)))
+ return double.NaN;
+
+ m_minimum = m_dataPoints
+ .Select(dataPoint => dataPoint.Value)
+ .Where(value => !double.IsNaN(value))
+ .Min();
+
+ return m_minimum.Value;
+ }
+ }
+
+ ///
+ /// Gets the average value in the series.
+ ///
+ public double Average
+ {
+ get
+ {
+ if (m_average.HasValue)
+ return m_average.Value;
+
+ if (!m_dataPoints.Any(dataPoint => !double.IsNaN(dataPoint.Value)))
+ return double.NaN;
+
+ m_average = m_dataPoints
+ .Select(dataPoint => dataPoint.Value)
+ .Where(value => !double.IsNaN(value))
+ .Average();
+
+ return m_average.Value;
+ }
+ }
+
+ public DataPoint this[int index]
+ {
+ get
+ {
+ return m_dataPoints[index];
+ }
+ }
+
+ ///
+ /// Flag that tells the DataGroup .ToData function not to add to data blob because this value is calculated.
+ ///
+ public bool Calculated { get; set; } = false;
+
+ #endregion
+
+ #region [ Methods ]
+
+
+ ///
+ /// Creates a new that is a subset.
+ ///
+ /// The index at which the new DataSeries starts.
+ /// The index at which the new DataSeries ends.
+ /// a new
+ public DataSeries ToSubSeries(int startIndex, int endIndex)
+ {
+ DataSeries subSeries = new DataSeries();
+ int count;
+
+ subSeries.SeriesInfo = m_seriesInfo;
+
+ if (startIndex < 0)
+ startIndex = 0;
+
+ if (endIndex >= m_dataPoints.Count)
+ endIndex = m_dataPoints.Count - 1;
+
+ count = endIndex - startIndex + 1;
+
+ if (count > 0)
+ subSeries.DataPoints = m_dataPoints.Skip(startIndex).Take(count).ToList();
+
+ return subSeries;
+ }
+
+ ///
+ /// Creates a new that is a subset.
+ ///
+ /// The index at which the new DataSeries starts.
+ /// a new
+ public DataSeries ToSubSeries(int startSeries) => ToSubSeries(startSeries, this.Length);
+
+ public DataSeries ToSubSeries(DateTime startTime, DateTime endTime)
+ {
+ DataSeries subSeries = new DataSeries();
+
+ subSeries.SeriesInfo = m_seriesInfo;
+
+ subSeries.DataPoints = m_dataPoints
+ .SkipWhile(point => point.Time < startTime)
+ .TakeWhile(point => point.Time <= endTime)
+ .ToList();
+
+ return subSeries;
+ }
+
+ ///
+ /// Creates a new that is a subset.
+ ///
+ /// The time at which the new DataSeries starts.
+ /// a new
+ public DataSeries ToSubSeries(DateTime startTime) => ToSubSeries(startTime, this[this.Length - 1].Time);
+
+ public DataSeries Shift(TimeSpan timeShift)
+ {
+ DataSeries shifted = new DataSeries();
+
+ shifted.SeriesInfo = m_seriesInfo;
+
+ shifted.DataPoints = m_dataPoints
+ .Select(dataPoint => dataPoint.Shift(timeShift))
+ .ToList();
+
+ return shifted;
+ }
+
+ public DataSeries Negate()
+ {
+ DataSeries negatedDataSeries = new DataSeries();
+
+ negatedDataSeries.DataPoints = m_dataPoints
+ .Select(point => point.Negate())
+ .ToList();
+
+ return negatedDataSeries;
+ }
+
+ public DataSeries Add(DataSeries operand)
+ {
+ DataSeries sum = new DataSeries();
+
+ if (m_dataPoints.Count != operand.DataPoints.Count)
+ throw new InvalidOperationException("Cannot take the sum of series with mismatched time values");
+
+ sum.DataPoints = m_dataPoints
+ .Zip(operand.DataPoints, Add)
+ .ToList();
+
+ return sum;
+ }
+
+ public DataSeries Subtract(DataSeries operand)
+ {
+ return Add(operand.Negate());
+ }
+
+ public DataSeries Multiply(double value)
+ {
+ DataSeries result = new DataSeries();
+
+ result.DataPoints = m_dataPoints
+ .Select(point => point.Multiply(value))
+ .ToList();
+
+ return result;
+ }
+
+ public DataSeries Copy()
+ {
+ return Multiply(1.0D);
+ }
+
+ public int Threshhold(double value)
+ {
+ return m_dataPoints.FindIndex(x => x.LargerThan(value));
+ }
+
+ ///
+ /// Downsamples the current DataSeries to requested sample count, if the
+ ///
+ ///
+ public void Downsample(int maxSampleCount)
+ {
+ // don't actually downsample, if it doesn't need it.
+ if (DataPoints.Count <= maxSampleCount) return;
+
+ DateTime epoch = new DateTime(1970, 1, 1);
+ double startTime = StartTime.Subtract(epoch).TotalMilliseconds;
+ double endTime = EndTime.Subtract(epoch).TotalMilliseconds;
+ List data = new List();
+
+ // milliseconds per returned sampled size
+ int step = (int)(Duration*1000) / maxSampleCount;
+ if (step < 1)
+ step = 1;
+
+ int index = 0;
+ for (double n = startTime * 1000; n <= endTime * 1000; n += 2 * step)
+ {
+ DataPoint min = null;
+ DataPoint max = null;
+
+ while (index < DataPoints.Count() && DataPoints[index].Time.Subtract(epoch).TotalMilliseconds * 1000 < n + 2 * step)
+ {
+ if (min == null || min.Value > DataPoints[index].Value)
+ min = DataPoints[index];
+
+ if (max == null || max.Value <= DataPoints[index].Value)
+ max = DataPoints[index];
+
+ ++index;
+ }
+
+ if (min != null)
+ {
+ if (min.Time < max.Time)
+ {
+ data.Add(min);
+ data.Add(max);
+ }
+ else if (min.Time > max.Time)
+ {
+ data.Add(max);
+ data.Add(min);
+ }
+ else
+ {
+ data.Add(min);
+ }
+ }
+ }
+ DataPoints = data;
+ }
+
+ ///
+ /// Upsamples the current DataSeries to requested sample count, assuming the requested rate is larger than the current
+ ///
+ ///
+ public void Upsample(int minSamplesPerCycle, double systemFrequency)
+ {
+ // don't actually upsample, if it doesn't need it.
+ if (minSamplesPerCycle <= 0)
+ return;
+ TimeSpan duration = EndTime - StartTime;
+ double cycles = duration.TotalSeconds * systemFrequency;
+ int minSampleCount = (int)Math.Round(cycles * minSamplesPerCycle);
+ if (minSampleCount <= DataPoints.Count)
+ return;
+
+ // Creating spline fit to perform upsampling
+ List xValues = DataPoints
+ .Select(point => (double) point.Time.Subtract(StartTime).Ticks)
+ .ToList();
+ List yValues= DataPoints
+ .Select(point => point.Value)
+ .ToList();
+ SplineFit splineFit = SplineFit.ComputeCubicSplines(xValues, yValues);
+
+ List data = Enumerable
+ .Range(0, minSampleCount)
+ .Select(sample => sample * duration.Ticks / minSampleCount)
+ .Select(sampleTicks =>
+ new DataPoint()
+ {
+ Time = StartTime.AddTicks(sampleTicks),
+ Value = splineFit.CalculateY(sampleTicks)
+ }
+ ).ToList();
+
+ DataPoints = data;
+ }
+
+ #endregion
+
+ #region [ Static ]
+
+ // Static Methods
+
+ public static DataSeries Merge(IEnumerable dataSeriesList)
+ {
+ if (dataSeriesList == null)
+ throw new ArgumentNullException(nameof(dataSeriesList));
+
+ DataSeries mergedSeries = new DataSeries();
+ DateTime lastTime = default(DateTime);
+
+ IEnumerable dataPoints = dataSeriesList
+ .Where(dataSeries => dataSeries != null)
+ .Where(dataSeries => dataSeries.DataPoints.Count != 0)
+ .OrderBy(dataSeries => dataSeries[0].Time)
+ .SelectMany(series => series.DataPoints);
+
+ foreach (DataPoint next in dataPoints)
+ {
+ if (mergedSeries.DataPoints.Count == 0 || next.Time > lastTime)
+ {
+ mergedSeries.DataPoints.Add(next);
+ lastTime = next.Time;
+ }
+ }
+
+ return mergedSeries;
+ }
+
+ private static DataPoint Add(DataPoint point1, DataPoint point2)
+ {
+ return point1.Add(point2);
+ }
+
+ public static DataSeries FromData(Meter meter, byte[] data)
+ {
+
+ if (data == null)
+ return null;
+
+ // Restore the GZip header before uncompressing
+ data[0] = 0x1F;
+ data[1] = 0x8B;
+
+ byte[] uncompressedData = GZipStream.UncompressBuffer(data);
+ int offset = 0;
+
+ int samples = LittleEndian.ToInt32(uncompressedData, offset);
+ offset += sizeof(int);
+
+ List times = new List();
+
+ while (times.Count < samples)
+ {
+ int timeValues = LittleEndian.ToInt32(uncompressedData, offset);
+ offset += sizeof(int);
+
+ long currentValue = LittleEndian.ToInt64(uncompressedData, offset);
+ offset += sizeof(long);
+ times.Add(new DateTime(currentValue));
+
+ for (int i = 1; i < timeValues; i++)
+ {
+ currentValue += LittleEndian.ToUInt16(uncompressedData, offset);
+ offset += sizeof(ushort);
+ times.Add(new DateTime(currentValue));
+ }
+ }
+
+ DataSeries dataSeries = new DataSeries();
+ int seriesID = LittleEndian.ToInt32(uncompressedData, offset);
+ offset += sizeof(int);
+
+ if (seriesID > 0 && !(meter is null))
+ dataSeries.SeriesInfo = meter.Series.FirstOrDefault(s => s.ID == seriesID);
+
+ const ushort NaNValue = ushort.MaxValue;
+ double decompressionOffset = LittleEndian.ToDouble(uncompressedData, offset);
+ double decompressionScale = LittleEndian.ToDouble(uncompressedData, offset + sizeof(double));
+ offset += 2 * sizeof(double);
+
+ for (int i = 0; i < samples; i++)
+ {
+ ushort compressedValue = LittleEndian.ToUInt16(uncompressedData, offset);
+ offset += sizeof(ushort);
+
+ double decompressedValue = decompressionScale * compressedValue + decompressionOffset;
+
+ if (compressedValue == NaNValue)
+ decompressedValue = double.NaN;
+
+ dataSeries.DataPoints.Add(new DataPoint()
+ {
+ Time = times[i],
+ Value = decompressedValue
+ });
+ }
+
+ return dataSeries;
+
+ }
+
+ #endregion
+ }
+}
diff --git a/src/Libraries/FaultData/DataAnalysis/ReportedDisturbance.cs b/src/Libraries/FaultData/DataAnalysis/ReportedDisturbance.cs
new file mode 100644
index 00000000..a078eb5b
--- /dev/null
+++ b/src/Libraries/FaultData/DataAnalysis/ReportedDisturbance.cs
@@ -0,0 +1,58 @@
+//******************************************************************************************************
+// ReportedDisturbance.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 12/06/2017 - Stephen C. Wills
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using Gemstone;
+using Gemstone.PQDIF.Logical;
+
+namespace FaultData.DataAnalysis
+{
+ public class ReportedDisturbance
+ {
+ public ReportedDisturbance(Phase phase, DateTime time, double max, double min, double avg, TimeSpan duration, QuantityUnits units)
+ {
+ Phase = phase;
+ Time = time;
+ Maximum = max;
+ Minimum = min;
+ Average = avg;
+ Duration = duration;
+ Units = units;
+ }
+
+ public Phase Phase { get; }
+ public DateTime Time { get; }
+ public double Maximum { get; }
+ public double Minimum { get; }
+ public double Average { get; }
+ public TimeSpan Duration { get; }
+ public QuantityUnits Units { get; }
+
+ public ReportedDisturbance ShiftTimestampTo(DateTime shiftedTime) =>
+ new ReportedDisturbance(Phase, shiftedTime, Maximum, Minimum, Average, Duration, Units);
+
+ public Range ToRange()
+ {
+ return new Range(Time, Time + Duration);
+ }
+ }
+}
diff --git a/src/Libraries/FaultData/DataAnalysis/Transform.cs b/src/Libraries/FaultData/DataAnalysis/Transform.cs
new file mode 100644
index 00000000..9e4aa28d
--- /dev/null
+++ b/src/Libraries/FaultData/DataAnalysis/Transform.cs
@@ -0,0 +1,358 @@
+//******************************************************************************************************
+// Transform.cs - Gbtc
+//
+// Copyright © 2014, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/28/2014 - Stephen C. Wills
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using Gemstone.Numeric.Analysis;
+using openXDA.Model;
+
+namespace FaultData.DataAnalysis
+{
+ public static class Transform
+ {
+ public static DataGroup Combine(params DataGroup[] dataGroups)
+ {
+ DataGroup combination = new DataGroup();
+
+ foreach (DataGroup dataGroup in dataGroups)
+ {
+ foreach (DataSeries dataSeries in dataGroup.DataSeries)
+ combination.Add(dataSeries);
+ }
+
+ return combination;
+ }
+
+ public static VICycleDataGroup ToVICycleDataGroup(VIDataGroup dataGroup, double frequency, bool compress = false)
+ {
+ DataSeries[] cycleSeries = dataGroup.Data;
+
+ return new VICycleDataGroup(cycleSeries
+ .Where(dataSeries => (object)dataSeries != null)
+ .Select(dataSeries => ToCycleDataGroup(dataSeries, frequency, compress))
+ .ToList(), dataGroup.Asset);
+ }
+
+ public static CycleDataGroup ToCycleDataGroup(DataSeries dataSeries, double frequency, bool compress=false)
+ {
+ if (dataSeries is null)
+ return null;
+
+ DataSeries rmsSeries = new DataSeries();
+ DataSeries phaseSeries = new DataSeries();
+ DataSeries peakSeries = new DataSeries();
+ DataSeries errorSeries = new DataSeries();
+
+ // Set series info to the source series info
+ rmsSeries.SeriesInfo = dataSeries.SeriesInfo;
+ phaseSeries.SeriesInfo = dataSeries.SeriesInfo;
+ peakSeries.SeriesInfo = dataSeries.SeriesInfo;
+ errorSeries.SeriesInfo = dataSeries.SeriesInfo;
+
+ // Get samples per cycle of the data series based on the given frequency
+ int samplesPerCycle = CalculateSamplesPerCycle(dataSeries, frequency);
+
+ //preinitialize size of SeriesInfo
+ int ncycleData = dataSeries.DataPoints.Count - samplesPerCycle + 1;
+
+ if (ncycleData <= 0)
+ return null;
+
+ rmsSeries.DataPoints.Capacity = ncycleData;
+ phaseSeries.DataPoints.Capacity = ncycleData;
+ peakSeries.DataPoints.Capacity = ncycleData;
+ errorSeries.DataPoints.Capacity = ncycleData;
+
+ // Initialize arrays of y-values and t-values for calculating cycle data as necessary
+ double[] yValues = new double[samplesPerCycle];
+ double[] tValues = new double[samplesPerCycle];
+
+ void CaptureCycle(int cycleIndex)
+ {
+ DateTime startTime = dataSeries.DataPoints[0].Time;
+
+ for (int i = 0; i < samplesPerCycle; i++)
+ {
+ DateTime time = dataSeries.DataPoints[cycleIndex + i].Time;
+ double value = dataSeries.DataPoints[cycleIndex + i].Value;
+ tValues[i] = time.Subtract(startTime).TotalSeconds;
+ yValues[i] = value;
+ }
+ }
+
+ // Obtain a list of time gaps in the data series
+ List gapIndexes = Enumerable.Range(0, dataSeries.DataPoints.Count - 1)
+ .Where(index =>
+ {
+ DataPoint p1 = dataSeries[index];
+ DataPoint p2 = dataSeries[index + 1];
+ double cycleDiff = (p2.Time - p1.Time).TotalSeconds * frequency;
+
+ // Detect gaps larger than a quarter cycle.
+ // Tolerance of 0.000062 calculated
+ // assuming 3.999 samples per cycle
+ return (cycleDiff > 0.250062);
+ })
+ .ToList();
+
+ double sum = 0;
+
+ if (dataSeries.DataPoints.Count >= samplesPerCycle)
+ {
+ CaptureCycle(0);
+ sum = yValues.Sum(y => y * y);
+
+ DateTime cycleTime = dataSeries.DataPoints[0].Time;
+ SineWave sineFit = WaveFit.SineFit(yValues, tValues, frequency);
+ double phase = sineFit.Phase;
+
+ double ComputeSineError() => tValues
+ .Select(sineFit.CalculateY)
+ .Zip(yValues, (estimate, value) => Math.Abs(estimate - value))
+ .Sum();
+
+ double sineError = ComputeSineError();
+ double previousSineError = sineError;
+
+ rmsSeries.DataPoints.Add(new DataPoint()
+ {
+ Time = cycleTime,
+ Value = Math.Sqrt(sum / samplesPerCycle)
+ });
+
+ phaseSeries.DataPoints.Add(new DataPoint()
+ {
+ Time = cycleTime,
+ Value = phase
+ });
+
+ peakSeries.DataPoints.Add(new DataPoint()
+ {
+ Time = cycleTime,
+ Value = sineFit.Amplitude
+ });
+
+ errorSeries.DataPoints.Add(new DataPoint()
+ {
+ Time = cycleTime,
+ Value = sineError
+ });
+
+ // Reduce RMS to max 2 pt per cycle to get half cycle RMS
+ int step = 1;
+ if (compress)
+ step = (int)Math.Floor(samplesPerCycle / 2.0D);
+ if (step == 0)
+ step = 1;
+
+ for (int cycleIndex = step; cycleIndex < dataSeries.DataPoints.Count - samplesPerCycle + 1; cycleIndex += step)
+ {
+ for (int j = 0; j < step; j++)
+ {
+ int oldIndex = cycleIndex - step + j;
+ int newIndex = oldIndex + samplesPerCycle;
+ double oldValue = dataSeries.DataPoints[oldIndex].Value;
+ double newValue = dataSeries.DataPoints[newIndex].Value;
+ sum += newValue * newValue - oldValue * oldValue;
+ }
+
+ // If the cycle following i contains a data gap, do not calculate cycle data
+ if (gapIndexes.Any(index => cycleIndex <= index && (cycleIndex + samplesPerCycle - 1) > index))
+ continue;
+
+ phase += 2 * Math.PI * frequency * (dataSeries.DataPoints[cycleIndex].Time - cycleTime).TotalSeconds;
+
+ // Use the time of the first data point in the cycle as the time of the cycle
+ cycleTime = dataSeries.DataPoints[cycleIndex].Time;
+
+ CaptureCycle(cycleIndex);
+
+ if (compress)
+ sineError = ComputeSineError();
+
+ if (!compress || Math.Abs(previousSineError - sineError) > sineError * 0.0001)
+ {
+ sineFit = WaveFit.SineFit(yValues, tValues, frequency);
+ phase = sineFit.Phase;
+ sineError = ComputeSineError();
+ }
+
+ previousSineError = sineError;
+
+ rmsSeries.DataPoints.Add(new DataPoint()
+ {
+ Time = cycleTime,
+ Value = Math.Sqrt(sum / samplesPerCycle)
+ });
+
+ phaseSeries.DataPoints.Add(new DataPoint()
+ {
+ Time = cycleTime,
+ Value = phase
+ });
+
+ peakSeries.DataPoints.Add(new DataPoint()
+ {
+ Time = cycleTime,
+ Value = sineFit.Amplitude
+ });
+
+ errorSeries.DataPoints.Add(new DataPoint()
+ {
+ Time = cycleTime,
+ Value = sineError
+ });
+ }
+ }
+
+ // Add a series to the data group for each series of cycle data
+ DataGroup dataGroup = new DataGroup();
+ dataGroup.Add(rmsSeries);
+ dataGroup.Add(phaseSeries);
+ dataGroup.Add(peakSeries);
+ dataGroup.Add(errorSeries);
+
+ return new CycleDataGroup(dataGroup, dataSeries.SeriesInfo.Channel.Asset);
+ }
+
+ public static DataSeries ToRMS(DataSeries dataSeries, double frequency, bool compress = false)
+ {
+ DataSeries rmsSeries = new DataSeries();
+
+ int samplesPerCycle;
+ double[] yValues;
+ double[] tValues;
+ double sum;
+
+ DateTime cycleTime;
+
+ if ((object)dataSeries == null)
+ return null;
+
+ // Set series info to the source series info
+ rmsSeries.SeriesInfo = dataSeries.SeriesInfo;
+
+
+ // Get samples per cycle of the data series based on the given frequency
+ samplesPerCycle = Transform.CalculateSamplesPerCycle(dataSeries, frequency);
+
+ //preinitialize size of SeriesInfo
+ int ncycleData = dataSeries.DataPoints.Count - samplesPerCycle;
+ rmsSeries.DataPoints = new List(ncycleData);
+
+
+
+ // Initialize arrays of y-values and t-values for calculating cycle data as necessary
+ yValues = new double[samplesPerCycle];
+ tValues = new double[samplesPerCycle];
+
+ // Obtain a list of time gaps in the data series
+ List gapIndexes = Enumerable.Range(0, dataSeries.DataPoints.Count - 1)
+ .Where(index =>
+ {
+ DataPoint p1 = dataSeries[index];
+ DataPoint p2 = dataSeries[index + 1];
+ double cycleDiff = (p2.Time - p1.Time).TotalSeconds * frequency;
+
+ // Detect gaps larger than a quarter cycle.
+ // Tolerance of 0.000062 calculated
+ // assuming 3.999 samples per cycle
+ return (cycleDiff > 0.250062);
+ })
+ .ToList();
+
+ sum = 0;
+
+ if (dataSeries.DataPoints.Count > samplesPerCycle)
+ {
+ sum = dataSeries.DataPoints.Take(samplesPerCycle).Sum(pt => pt.Value * pt.Value);
+
+ rmsSeries.DataPoints.Add(new DataPoint()
+ {
+ Time = dataSeries.DataPoints[0].Time,
+ Value = Math.Sqrt(sum / samplesPerCycle)
+ });
+
+ cycleTime = dataSeries.DataPoints[0].Time;
+
+ // Reduce RMS to max 2 pt per cycle to get half cycle RMS
+ int step = 1;
+ if (compress)
+ step = (int)Math.Floor(samplesPerCycle / 2.0D);
+ if (step == 0)
+ step = 1;
+
+ for (int i = step; i < dataSeries.DataPoints.Count - samplesPerCycle; i = i + step)
+ {
+
+ for (int j = 0; j < step; j++)
+ {
+ sum = sum - dataSeries.DataPoints[i - step + j].Value * dataSeries.DataPoints[i - step + j].Value;
+ sum = sum + dataSeries.DataPoints[i - step + j + samplesPerCycle].Value * dataSeries.DataPoints[i - step + j + samplesPerCycle].Value;
+ }
+
+ // If the cycle following i contains a data gap, do not calculate cycle data
+ if (gapIndexes.Any(index => i <= index && (i + samplesPerCycle - 1) > index))
+ continue;
+
+ // Use the time of the first data point in the cycle as the time of the cycle
+ cycleTime = dataSeries.DataPoints[i].Time;
+
+ rmsSeries.DataPoints.Add(new DataPoint()
+ {
+ Time = cycleTime,
+ Value = Math.Sqrt(sum / samplesPerCycle)
+ });
+
+ }
+ }
+
+ return rmsSeries;
+ }
+
+ public static List ToValues(DataSeries series)
+ {
+ return series.DataPoints
+ .Select(dataPoint => dataPoint.Value)
+ .ToList();
+ }
+
+ public static int CalculateSamplesPerCycle(DataSeries dataSeries, double frequency)
+ {
+ return CalculateSamplesPerCycle(dataSeries.SampleRate, frequency);
+ }
+
+ public static int CalculateSamplesPerCycle(double samplesPerSecond, double frequency)
+ {
+ int[] commonSampleRates =
+ {
+ 4, 8, 16, 32,
+ 80, 96, 100, 200,
+ 64, 128, 256, 512, 1024
+ };
+
+ int calculatedRate = (int)Math.Round(samplesPerSecond / frequency);
+ int nearestCommonRate = commonSampleRates.MinBy(rate => Math.Abs(calculatedRate - rate));
+ int diff = Math.Abs(calculatedRate - nearestCommonRate);
+ return (diff < nearestCommonRate * 0.1D) ? nearestCommonRate : calculatedRate;
+ }
+ }
+}
diff --git a/src/Libraries/FaultData/DataAnalysis/VICycleDataGroup.cs b/src/Libraries/FaultData/DataAnalysis/VICycleDataGroup.cs
new file mode 100644
index 00000000..d913f052
--- /dev/null
+++ b/src/Libraries/FaultData/DataAnalysis/VICycleDataGroup.cs
@@ -0,0 +1,476 @@
+//******************************************************************************************************
+// VICycleDataGroup.cs - Gbtc
+//
+// Copyright © 2014, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2014 - Stephen C. Wills
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using FaultAlgorithms;
+using openXDA.Model;
+
+namespace FaultData.DataAnalysis
+{
+ public class VICycleDataGroup
+ {
+ #region [ Members ]
+
+ // Fields
+ private List m_vIndices;
+ private Asset m_asset;
+
+ private int m_iaIndex;
+ private int m_ibIndex;
+ private int m_icIndex;
+ private int m_irIndex;
+
+ private List m_cycleDataGroups;
+
+ private class VIndices
+ {
+ public int Va;
+ public int Vb;
+ public int Vc;
+
+ public int Vab;
+ public int Vbc;
+ public int Vca;
+
+ public int distance;
+ public VIndices()
+ {
+ Va = -1;
+ Vb = -1;
+ Vc = -1;
+ Vab = -1;
+ Vbc = -1;
+ Vca = -1;
+
+ distance = -1;
+ }
+
+ public int DefinedNeutralVoltages
+ {
+ get
+ {
+ return ((Va > -1) ? 1 : 0) + ((Vb > -1) ? 1 : 0) + ((Vc > -1) ? 1 : 0);
+ }
+ }
+
+ public int DefinedLineVoltages
+ {
+ get
+ {
+ return ((Vab > -1) ? 1 : 0) + ((Vbc > -1) ? 1 : 0) + ((Vca > -1) ? 1 : 0);
+ }
+ }
+
+ public bool allVoltagesDefined
+ {
+ get
+ {
+ return ((Vab > -1) && (Vbc > -1) && (Vca > -1) &&
+ (Va > -1) && (Vb > -1) && (Vc > -1));
+ }
+ }
+
+ }
+
+
+ public double VBase => m_asset.VoltageKV;
+
+ #endregion
+
+ #region [ Constructors ]
+
+ public VICycleDataGroup(DataGroup dataGroup)
+ {
+ m_vIndices = new List();
+ m_asset = dataGroup.Asset;
+
+ m_cycleDataGroups = dataGroup.DataSeries
+ .Select((dataSeries, index) => new { DataSeries = dataSeries, Index = index })
+ .GroupBy(obj => obj.Index / 4)
+ .Where(grouping => grouping.Count() >= 4)
+ .Select(grouping => grouping.Select(obj => obj.DataSeries))
+ .Select(grouping => new CycleDataGroup(new DataGroup(grouping, dataGroup.Asset), dataGroup.Asset))
+ .ToList();
+
+ MapIndexes();
+ }
+
+ public VICycleDataGroup(List cycleDataGroups, Asset asset)
+ {
+ m_vIndices = new List();
+ m_cycleDataGroups = new List(cycleDataGroups);
+ m_asset = asset;
+ MapIndexes();
+ }
+
+ #endregion
+
+ #region [ Properties ]
+
+ public CycleDataGroup VA
+ {
+ get
+ {
+ return (m_vIndices.Count > 0 && m_vIndices[0].Va >= 0) ? m_cycleDataGroups[m_vIndices[0].Va] : null;
+ }
+ }
+
+ public CycleDataGroup VB
+ {
+ get
+ {
+ return (m_vIndices.Count > 0 && m_vIndices[0].Vb >= 0) ? m_cycleDataGroups[m_vIndices[0].Vb] : null;
+ }
+ }
+
+ public CycleDataGroup VC
+ {
+ get
+ {
+ return (m_vIndices.Count > 0 && m_vIndices[0].Vc >= 0) ? m_cycleDataGroups[m_vIndices[0].Vc] : null;
+ }
+ }
+
+ public CycleDataGroup VAB
+ {
+ get
+ {
+ return (m_vIndices.Count > 0 && m_vIndices[0].Vab >= 0) ? m_cycleDataGroups[m_vIndices[0].Vab] : null;
+ }
+ }
+
+ public CycleDataGroup VBC
+ {
+ get
+ {
+ return (m_vIndices.Count > 0 && m_vIndices[0].Vbc >= 0) ? m_cycleDataGroups[m_vIndices[0].Vbc] : null;
+ }
+ }
+
+ public CycleDataGroup VCA
+ {
+ get
+ {
+ return (m_vIndices.Count > 0 && m_vIndices[0].Vca >= 0) ? m_cycleDataGroups[m_vIndices[0].Vca] : null;
+ }
+ }
+
+ public CycleDataGroup IA
+ {
+ get
+ {
+ return (m_iaIndex >= 0) ? m_cycleDataGroups[m_iaIndex] : null;
+ }
+ }
+
+ public CycleDataGroup IB
+ {
+ get
+ {
+ return (m_ibIndex >= 0) ? m_cycleDataGroups[m_ibIndex] : null;
+ }
+ }
+
+ public CycleDataGroup IC
+ {
+ get
+ {
+ return (m_icIndex >= 0) ? m_cycleDataGroups[m_icIndex] : null;
+ }
+ }
+
+ public CycleDataGroup IR
+ {
+ get
+ {
+ return (m_irIndex >= 0) ? m_cycleDataGroups[m_irIndex] : null;
+ }
+ }
+
+ public List CycleDataGroups {
+ get {
+ return m_cycleDataGroups;
+ }
+ }
+
+ #endregion
+
+ #region [ Methods ]
+
+ public DataGroup ToDataGroup()
+ {
+ return Transform.Combine(m_cycleDataGroups
+ .Select(cycleDataGroup => cycleDataGroup.ToDataGroup())
+ .ToArray());
+ }
+
+ public VICycleDataGroup ToSubSet(int startIndex, int endIndex)
+ {
+ return new VICycleDataGroup(m_cycleDataGroups
+ .Select(cycleDataGroup => cycleDataGroup.ToSubGroup(startIndex, endIndex))
+ .ToList(), m_asset);
+ }
+
+ public VICycleDataGroup ToSubSet(DateTime startTime, DateTime endTime)
+ {
+ return new VICycleDataGroup(m_cycleDataGroups
+ .Select(cycleDataGroup => cycleDataGroup.ToSubGroup(startTime, endTime))
+ .ToList(), m_asset);
+ }
+
+ public void PushDataTo(CycleDataSet cycleDataSet)
+ {
+ FaultAlgorithms.CycleData cycleData;
+ Cycle[] cycles;
+ CycleDataGroup[] cycleDataGroups;
+
+ cycleDataGroups = new CycleDataGroup[] { VA, VB, VC, IA, IB, IC };
+ cycles = new Cycle[cycleDataGroups.Length];
+
+ for (int i = 0; i < VA.ToDataGroup().Samples; i++)
+ {
+ cycleData = new FaultAlgorithms.CycleData();
+
+ cycles[0] = cycleData.AN.V;
+ cycles[1] = cycleData.BN.V;
+ cycles[2] = cycleData.CN.V;
+ cycles[3] = cycleData.AN.I;
+ cycles[4] = cycleData.BN.I;
+ cycles[5] = cycleData.CN.I;
+
+ for (int j = 0; j < cycles.Length; j++)
+ {
+ if (cycleDataGroups[j] == null)
+ continue;
+
+ cycles[j].RMS = cycleDataGroups[j].RMS[i].Value;
+ cycles[j].Phase = cycleDataGroups[j].Phase[i].Value;
+ cycles[j].Peak = cycleDataGroups[j].Peak[i].Value;
+ cycles[j].Error = cycleDataGroups[j].Error[i].Value;
+ }
+
+ cycleDataSet[i] = cycleData;
+ }
+ }
+
+ private void MapIndexes()
+ {
+
+ m_iaIndex = -1;
+ m_ibIndex = -1;
+ m_icIndex = -1;
+ m_irIndex = -1;
+
+ List<int> vaIndices = new List<int>();
+ List<int> vbIndices = new List<int>();
+ List<int> vcIndices = new List<int>();
+ List<int> vabIndices = new List<int>();
+ List<int> vbcIndices = new List<int>();
+ List<int> vcaIndices = new List<int>();
+
+ for (int i = 0; i < m_cycleDataGroups.Count; i++)
+ {
+ if (isVoltage("AN", m_cycleDataGroups[i]))
+ vaIndices.Add(i);
+ else if (isVoltage("BN", m_cycleDataGroups[i]))
+ vbIndices.Add(i);
+ else if (isVoltage("CN", m_cycleDataGroups[i]))
+ vcIndices.Add(i);
+ else if (isVoltage("AB", m_cycleDataGroups[i]))
+ vabIndices.Add(i);
+ else if (isVoltage("BC", m_cycleDataGroups[i]))
+ vbcIndices.Add(i);
+ else if (isVoltage("CA", m_cycleDataGroups[i]))
+ vcaIndices.Add(i);
+
+ }
+
+ //Walk through all Va and try to get corresponding Vb and Vc...
+ List<int?> ProcessedIndices = new List<int?>();
+ foreach (int? VaIndex in vaIndices)
+ {
+ int assetID = m_cycleDataGroups[(int)VaIndex].Asset.ID;
+
+ int VbIndex = vbIndices.Cast<int?>().FirstOrDefault(i => m_cycleDataGroups[(int)i].Asset.ID == assetID && !ProcessedIndices.Contains(i)) ?? -1;
+ int VcIndex = vcIndices.Cast<int?>().FirstOrDefault(i => m_cycleDataGroups[(int)i].Asset.ID == assetID && !ProcessedIndices.Contains(i)) ?? -1;
+ int VabIndex = vabIndices.Cast<int?>().FirstOrDefault(i => m_cycleDataGroups[(int)i].Asset.ID == assetID && !ProcessedIndices.Contains(i)) ?? -1;
+ int VbcIndex = vbcIndices.Cast<int?>().FirstOrDefault(i => m_cycleDataGroups[(int)i].Asset.ID == assetID && !ProcessedIndices.Contains(i)) ?? -1;
+ int VcaIndex = vcaIndices.Cast<int?>().FirstOrDefault(i => m_cycleDataGroups[(int)i].Asset.ID == assetID && !ProcessedIndices.Contains(i)) ?? -1;
+
+ VIndices set = new VIndices();
+ ProcessedIndices.Add(VaIndex);
+ set.Va = (int)VaIndex;
+
+ if (VbIndex > -1)
+ {
+ ProcessedIndices.Add(VbIndex);
+ set.Vb = VbIndex;
+ }
+ if (VcIndex > -1)
+ {
+ ProcessedIndices.Add(VcIndex);
+ set.Vc = VcIndex;
+ }
+
+ if (VabIndex > -1)
+ {
+ ProcessedIndices.Add(VabIndex);
+ set.Vab = VabIndex;
+ }
+ if (VbcIndex > -1)
+ {
+ ProcessedIndices.Add(VbcIndex);
+ set.Vbc = VbcIndex;
+ }
+ if (VcaIndex > -1)
+ {
+ ProcessedIndices.Add(VcaIndex);
+ set.Vca = VcaIndex;
+ }
+
+
+ if (assetID == m_asset.ID)
+ {
+ set.distance = 0;
+ }
+ else
+ {
+ set.distance = m_asset.DistanceToAsset(assetID);
+ }
+
+ m_vIndices.Add(set);
+ }
+
+ // Also walk though all Vab to catch Leftover Cases where Va is not present
+ foreach (int? VabIndex in vabIndices)
+ {
+ int assetID = m_cycleDataGroups[(int)VabIndex].Asset.ID;
+
+ int VaIndex = vaIndices.Cast<int?>().FirstOrDefault(i => m_cycleDataGroups[(int)i].Asset.ID == assetID && !ProcessedIndices.Contains(i)) ?? -1;
+ int VbIndex = vbIndices.Cast<int?>().FirstOrDefault(i => m_cycleDataGroups[(int)i].Asset.ID == assetID && !ProcessedIndices.Contains(i)) ?? -1;
+ int VcIndex = vcIndices.Cast<int?>().FirstOrDefault(i => m_cycleDataGroups[(int)i].Asset.ID == assetID && !ProcessedIndices.Contains(i)) ?? -1;
+
+ int VbcIndex = vbcIndices.Cast<int?>().FirstOrDefault(i => m_cycleDataGroups[(int)i].Asset.ID == assetID && !ProcessedIndices.Contains(i)) ?? -1;
+ int VcaIndex = vcaIndices.Cast<int?>().FirstOrDefault(i => m_cycleDataGroups[(int)i].Asset.ID == assetID && !ProcessedIndices.Contains(i)) ?? -1;
+
+ VIndices set = new VIndices();
+ ProcessedIndices.Add(VabIndex);
+ set.Vab = (int)VabIndex;
+
+ if (VbIndex > -1)
+ {
+ ProcessedIndices.Add(VbIndex);
+ set.Vb = VbIndex;
+ }
+ if (VcIndex > -1)
+ {
+ ProcessedIndices.Add(VcIndex);
+ set.Vc = VcIndex;
+ }
+
+ if (VaIndex > -1)
+ {
+ ProcessedIndices.Add(VaIndex);
+ set.Va = VaIndex;
+ }
+ if (VbcIndex > -1)
+ {
+ ProcessedIndices.Add(VbcIndex);
+ set.Vbc = VbcIndex;
+ }
+ if (VcaIndex > -1)
+ {
+ ProcessedIndices.Add(VcaIndex);
+ set.Vca = VcaIndex;
+ }
+
+
+ if (assetID == m_asset.ID)
+ {
+ set.distance = 0;
+ }
+ else
+ {
+ set.distance = m_asset.DistanceToAsset(assetID);
+ }
+
+ m_vIndices.Add(set);
+ }
+
+ for (int i = 0; i < m_cycleDataGroups.Count; i++)
+ {
+ string measurementType = m_cycleDataGroups[i].RMS.SeriesInfo.Channel.MeasurementType.Name;
+ string phase = m_cycleDataGroups[i].RMS.SeriesInfo.Channel.Phase.Name;
+
+
+ if (measurementType == "Current" && phase == "AN")
+ m_iaIndex = i;
+ else if (measurementType == "Current" && phase == "BN")
+ m_ibIndex = i;
+ else if (measurementType == "Current" && phase == "CN")
+ m_icIndex = i;
+ else if (measurementType == "Current" && phase == "RES")
+ m_irIndex = i;
+ }
+ }
+
+ #endregion
+
+ #region [ Static ]
+
+ // Static Methods
+
+ private static bool isVoltage(string phase, CycleDataGroup dataGroup)
+ {
+
+ string measurementType = dataGroup.RMS.SeriesInfo.Channel.MeasurementType.Name;
+ string seriesPhase = dataGroup.RMS.SeriesInfo.Channel.Phase.Name;
+
+ if (measurementType != "Voltage")
+ return false;
+
+ if (seriesPhase != phase)
+ return false;
+
+ return true;
+
+ }
+
+ private static bool isCurrent(string phase, CycleDataGroup dataGroup)
+ {
+ string measurementType = dataGroup.RMS.SeriesInfo.Channel.MeasurementType.Name;
+ string seriesPhase = dataGroup.RMS.SeriesInfo.Channel.Phase.Name;
+
+ if (measurementType != "Current")
+ return false;
+
+ if (seriesPhase != phase)
+ return false;
+
+ return true;
+
+ }
+
+ #endregion
+
+ }
+}
diff --git a/src/Libraries/FaultData/DataAnalysis/VIDataGroup.cs b/src/Libraries/FaultData/DataAnalysis/VIDataGroup.cs
new file mode 100644
index 00000000..ae088e05
--- /dev/null
+++ b/src/Libraries/FaultData/DataAnalysis/VIDataGroup.cs
@@ -0,0 +1,521 @@
+//******************************************************************************************************
+// VIDataGroup.cs - Gbtc
+//
+// Copyright © 2014, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2014 - Stephen C. Wills
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using Gemstone.Data;
+using openXDA.Model;
+
+namespace FaultData.DataAnalysis
+{
+ public class VIDataGroup
+ {
+ #region [ Members ]
+
+ // Fields
+ private List<VIndices> m_vIndices;
+
+ private int m_iaIndex;
+ private int m_ibIndex;
+ private int m_icIndex;
+ private int m_irIndex;
+
+ private DataGroup m_dataGroup;
+
+ private class VIndices
+ {
+ public int Va { get; set; } = -1;
+ public int Vb { get; set; } = -1;
+ public int Vc { get; set; } = -1;
+
+ public int Vab { get; set; } = -1;
+ public int Vbc { get; set; } = -1;
+ public int Vca { get; set; } = -1;
+
+ public int Distance { get; set; } = -1;
+
+ public int DefinedNeutralVoltages =>
+ (Va >= 0 ? 1 : 0) +
+ (Vb >= 0 ? 1 : 0) +
+ (Vc >= 0 ? 1 : 0);
+
+ public int DefinedLineVoltages =>
+ (Vab >= 0 ? 1 : 0) +
+ (Vbc >= 0 ? 1 : 0) +
+ (Vca >= 0 ? 1 : 0);
+
+ public bool AllVoltagesDefined =>
+ (Va >= 0) && (Vb >= 0) && (Vc >= 0) &&
+ (Vab >= 0) && (Vbc >= 0) && (Vca >= 0);
+ }
+
+ #endregion
+
+ #region [ Constructors ]
+
+ public VIDataGroup(DataGroup dataGroup)
+ {
+
+ // Initialize each of
+ // the indexes to -1
+ m_vIndices = new List<VIndices>();
+
+ m_iaIndex = -1;
+ m_ibIndex = -1;
+ m_icIndex = -1;
+ m_irIndex = -1;
+
+ // Initialize the data group
+ m_dataGroup = new DataGroup(dataGroup.DataSeries, dataGroup.Asset);
+
+ HashSet<int> connectedAssets = new HashSet<int>(dataGroup.Asset.ConnectedAssets.Select(item => item.ID));
+
+ var groupings = dataGroup.DataSeries
+ .Select((DataSeries, Index) => new { DataSeries, Index })
+ .Where(item => !(item.DataSeries.SeriesInfo is null))
+ .Where(item => item.DataSeries.SeriesInfo.Channel.MeasurementCharacteristic.Name == "Instantaneous")
+ .Where(item => new[] { "Instantaneous", "Values" }.Contains(item.DataSeries.SeriesInfo.SeriesType.Name))
+ .GroupBy(item => item.DataSeries.SeriesInfo.Channel.AssetID)
+ .OrderBy(grouping => grouping.Key == dataGroup.Asset.ID ? 0 : 1)
+ .ThenBy(grouping => connectedAssets.Contains(grouping.Key) ? 0 : 1)
+ .ToList();
+
+ foreach (var grouping in groupings)
+ {
+ VIndices set = new VIndices() { Distance = 0 };
+
+ int assetID = grouping.Key;
+
+ if (assetID != dataGroup.Asset.ID)
+ set.Distance = dataGroup.Asset.DistanceToAsset(assetID);
+
+ foreach (var item in grouping)
+ {
+ string measurementType = item.DataSeries.SeriesInfo.Channel.MeasurementType.Name;
+ string phase = item.DataSeries.SeriesInfo.Channel.Phase.Name;
+
+ if (measurementType == "Voltage" && phase == "AN")
+ set.Va = item.Index;
+
+ if (measurementType == "Voltage" && phase == "BN")
+ set.Vb = item.Index;
+
+ if (measurementType == "Voltage" && phase == "CN")
+ set.Vc = item.Index;
+
+ if (measurementType == "Voltage" && phase == "AB")
+ set.Vab = item.Index;
+
+ if (measurementType == "Voltage" && phase == "BC")
+ set.Vbc = item.Index;
+
+ if (measurementType == "Voltage" && phase == "CA")
+ set.Vca = item.Index;
+
+ if (m_iaIndex < 0 && measurementType == "Current" && phase == "AN")
+ m_iaIndex = item.Index;
+
+ if (m_ibIndex < 0 && measurementType == "Current" && phase == "BN")
+ m_ibIndex = item.Index;
+
+ if (m_icIndex < 0 && measurementType == "Current" && phase == "CN")
+ m_icIndex = item.Index;
+
+ if (m_irIndex < 0 && measurementType == "Current" && phase == "RES")
+ m_irIndex = item.Index;
+ }
+
+ if (set.DefinedLineVoltages + set.DefinedNeutralVoltages > 0)
+ m_vIndices.Add(set);
+ }
+
+ if (m_vIndices.Count() == 0)
+ m_vIndices.Add(new VIndices());
+
+ CalculateMissingCurrentChannel();
+ CalculateMissingLLVoltageChannels();
+
+ m_vIndices.Sort((a, b) =>
+ {
+ if (b.AllVoltagesDefined && !a.AllVoltagesDefined)
+ return 1;
+ if (a.AllVoltagesDefined && !b.AllVoltagesDefined)
+ return -1;
+ if (!(a.Distance >= 0 && b.Distance >= 0))
+ return b.Distance.CompareTo(a.Distance);
+ return a.Distance.CompareTo(b.Distance);
+ });
+ }
+
+ private VIDataGroup()
+ {
+ }
+
+ #endregion
+
+ #region [ Properties ]
+
+ public DataSeries VA => (m_vIndices[0].Va >= 0)
+ ? m_dataGroup[m_vIndices[0].Va]
+ : null;
+
+ public DataSeries VB => (m_vIndices[0].Vb >= 0)
+ ? m_dataGroup[m_vIndices[0].Vb]
+ : null;
+
+ public DataSeries VC => (m_vIndices[0].Vc >= 0)
+ ? m_dataGroup[m_vIndices[0].Vc]
+ : null;
+
+ public DataSeries VAB => (m_vIndices[0].Vab >= 0)
+ ? m_dataGroup[m_vIndices[0].Vab]
+ : null;
+
+ public DataSeries VBC => (m_vIndices[0].Vbc >= 0)
+ ? m_dataGroup[m_vIndices[0].Vbc]
+ : null;
+
+ public DataSeries VCA => (m_vIndices[0].Vca >= 0)
+ ? m_dataGroup[m_vIndices[0].Vca]
+ : null;
+
+ public DataSeries IA => (m_iaIndex >= 0)
+ ? m_dataGroup[m_iaIndex]
+ : null;
+
+ public DataSeries IB => (m_ibIndex >= 0)
+ ? m_dataGroup[m_ibIndex]
+ : null;
+
+ public DataSeries IC => (m_icIndex >= 0)
+ ? m_dataGroup[m_icIndex]
+ : null;
+
+ public DataSeries IR => (m_irIndex >= 0)
+ ? m_dataGroup[m_irIndex]
+ : null;
+
+ public int DefinedNeutralVoltages => m_vIndices
+ .Select(item => item.DefinedNeutralVoltages)
+ .FirstOrDefault();
+
+ public int DefinedLineVoltages => m_vIndices
+ .Select(item => item.DefinedLineVoltages)
+ .FirstOrDefault();
+
+ public int DefinedCurrents =>
+ CurrentIndexes.Count(index => index >= 0);
+
+ public int DefinedPhaseCurrents =>
+ PhaseCurrentIndexes.Count(index => index >= 0);
+
+ public bool AllVIChannelsDefined =>
+ m_vIndices[0].AllVoltagesDefined &&
+ CurrentIndexes.All(index => index >= 0);
+
+ private int[] CurrentIndexes =>
+ new int[] { m_iaIndex, m_ibIndex, m_icIndex, m_irIndex };
+
+ private int[] PhaseCurrentIndexes =>
+ new int[] { m_iaIndex, m_ibIndex, m_icIndex };
+
+ public Asset Asset => m_dataGroup.Asset;
+
+ public DataSeries[] Data
+ {
+ get
+ {
+ List<DataSeries> result = new List<DataSeries>();
+
+ foreach (VIndices Vindex in m_vIndices)
+ {
+ if (Vindex.Va > -1)
+ result.Add(m_dataGroup[Vindex.Va]);
+ if (Vindex.Vb > -1)
+ result.Add(m_dataGroup[Vindex.Vb]);
+ if (Vindex.Vc > -1)
+ result.Add(m_dataGroup[Vindex.Vc]);
+
+ if (Vindex.Vab > -1)
+ result.Add(m_dataGroup[Vindex.Vab]);
+ if (Vindex.Vbc > -1)
+ result.Add(m_dataGroup[Vindex.Vbc]);
+ if (Vindex.Vca > -1)
+ result.Add(m_dataGroup[Vindex.Vca]);
+ }
+
+ if (m_iaIndex > -1)
+ result.Add(m_dataGroup[m_iaIndex]);
+ if (m_ibIndex > -1)
+ result.Add(m_dataGroup[m_ibIndex]);
+ if (m_icIndex > -1)
+ result.Add(m_dataGroup[m_icIndex]);
+ if (m_irIndex > -1)
+ result.Add(m_dataGroup[m_irIndex]);
+
+ return result.ToArray();
+ }
+ }
+
+ #endregion
+
+ #region [ Methods ]
+
+ /// <summary>
+ /// Given three of the four current channels, calculates the
+ /// missing channel based on the relationship IR = IA + IB + IC.
+ /// </summary>
+ private void CalculateMissingCurrentChannel()
+ {
+ Meter meter;
+ DataSeries missingSeries;
+
+ // If the data group does not have exactly 3 channels,
+ // then there is no missing channel or there is not
+ // enough data to calculate the missing channel
+ if (DefinedCurrents != 3)
+ return;
+
+ // Get the meter associated with the channels in this data group
+ meter = (IA ?? IB).SeriesInfo.Channel.Meter;
+
+ if (m_iaIndex == -1)
+ {
+ // Calculate IA = IR - IB - IC
+ missingSeries = IR.Add(IB.Negate()).Add(IC.Negate());
+ missingSeries.SeriesInfo = GetSeriesInfo(meter, IR.SeriesInfo.Channel.Asset, "Current", "AN", m_dataGroup.SamplesPerHour);
+ missingSeries.Calculated = true;
+ m_iaIndex = m_dataGroup.DataSeries.Count;
+ m_dataGroup.Add(missingSeries);
+ }
+ else if (m_ibIndex == -1)
+ {
+ // Calculate IB = IR - IA - IC
+ missingSeries = IR.Add(IA.Negate()).Add(IC.Negate());
+ missingSeries.SeriesInfo = GetSeriesInfo(meter, IR.SeriesInfo.Channel.Asset, "Current", "BN", m_dataGroup.SamplesPerHour);
+ missingSeries.Calculated = true;
+ m_ibIndex = m_dataGroup.DataSeries.Count;
+ m_dataGroup.Add(missingSeries);
+ }
+ else if (m_icIndex == -1)
+ {
+ // Calculate IC = IR - IA - IB
+ missingSeries = IR.Add(IA.Negate()).Add(IB.Negate());
+ missingSeries.SeriesInfo = GetSeriesInfo(meter, IR.SeriesInfo.Channel.Asset, "Current", "CN", m_dataGroup.SamplesPerHour);
+ missingSeries.Calculated = true;
+ m_icIndex = m_dataGroup.DataSeries.Count;
+ m_dataGroup.Add(missingSeries);
+ }
+ else
+ {
+ // Calculate IR = IA + IB + IC
+ missingSeries = IA.Add(IB).Add(IC);
+ missingSeries.SeriesInfo = GetSeriesInfo(meter, IA.SeriesInfo.Channel.Asset, "Current", "RES", m_dataGroup.SamplesPerHour);
+ missingSeries.Calculated = true;
+ m_irIndex = m_dataGroup.DataSeries.Count;
+ m_dataGroup.Add(missingSeries);
+ }
+ }
+
+ private void CalculateMissingLLVoltageChannels()
+ {
+ Meter meter;
+ DataSeries missingSeries;
+
+ //Do this for every Voltage set
+ for (int i = 0; i < m_vIndices.Count(); i++)
+ {
+ // If all line voltages are already present or there are not
+ // at least 2 lines we will not perform line to line calculations
+ if (m_vIndices[i].DefinedLineVoltages == 3 || m_vIndices[i].DefinedNeutralVoltages < 2)
+ continue;
+
+ // Get the meter associated with the channels in this data group
+ DataSeries VA = null;
+ DataSeries VB = null;
+ DataSeries VC = null;
+
+ if (m_vIndices[i].Va > -1)
+ VA = m_dataGroup[m_vIndices[i].Va];
+ if (m_vIndices[i].Vb > -1)
+ VB = m_dataGroup[m_vIndices[i].Vb];
+ if (m_vIndices[i].Vc > -1)
+ VC = m_dataGroup[m_vIndices[i].Vc];
+
+ meter = (VA ?? VB ?? VC).SeriesInfo.Channel.Meter;
+
+ if (m_vIndices[i].Vab == -1 && !(VA is null) && !(VB is null))
+ {
+ // Calculate VAB = VA - VB
+ missingSeries = VA.Add(VB.Negate());
+ missingSeries.SeriesInfo = GetSeriesInfo(meter, VA.SeriesInfo.Channel.Asset, "Voltage", "AB", m_dataGroup.SamplesPerHour);
+ missingSeries.Calculated = true;
+ m_vIndices[i].Vab = m_dataGroup.DataSeries.Count;
+ m_dataGroup.Add(missingSeries);
+ }
+
+ if (m_vIndices[i].Vbc == -1 && !(VB is null) && !(VC is null))
+ {
+ // Calculate VBC = VB - VC
+ missingSeries = VB.Add(VC.Negate());
+ missingSeries.SeriesInfo = GetSeriesInfo(meter, VB.SeriesInfo.Channel.Asset, "Voltage", "BC", m_dataGroup.SamplesPerHour);
+ missingSeries.Calculated = true;
+ m_vIndices[i].Vbc = m_dataGroup.DataSeries.Count;
+ m_dataGroup.Add(missingSeries);
+ }
+
+ if (m_vIndices[i].Vca == -1 && !(VC is null) && !(VA is null))
+ {
+ // Calculate VCA = VC - VA
+ missingSeries = VC.Add(VA.Negate());
+ missingSeries.SeriesInfo = GetSeriesInfo(meter, VC.SeriesInfo.Channel.Asset, "Voltage", "CA", m_dataGroup.SamplesPerHour);
+ missingSeries.Calculated = true;
+ m_vIndices[i].Vca = m_dataGroup.DataSeries.Count;
+ m_dataGroup.Add(missingSeries);
+ }
+ }
+ }
+
+ public DataGroup ToDataGroup()
+ {
+ return new DataGroup(m_dataGroup.DataSeries, m_dataGroup.Asset);
+ }
+
+ public VIDataGroup ToSubGroup(int startIndex, int endIndex)
+ {
+ VIDataGroup subGroup = new VIDataGroup();
+
+ subGroup.m_vIndices = m_vIndices;
+ subGroup.m_iaIndex = m_iaIndex;
+ subGroup.m_ibIndex = m_ibIndex;
+ subGroup.m_icIndex = m_icIndex;
+ subGroup.m_irIndex = m_irIndex;
+
+ subGroup.m_dataGroup = m_dataGroup.ToSubGroup(startIndex, endIndex);
+
+ return subGroup;
+ }
+
+ public VIDataGroup ToSubGroup(DateTime startTime, DateTime endTime)
+ {
+ VIDataGroup subGroup = new VIDataGroup();
+
+ subGroup.m_vIndices = m_vIndices;
+ subGroup.m_iaIndex = m_iaIndex;
+ subGroup.m_ibIndex = m_ibIndex;
+ subGroup.m_icIndex = m_icIndex;
+ subGroup.m_irIndex = m_irIndex;
+
+ subGroup.m_dataGroup = m_dataGroup.ToSubGroup(startTime, endTime);
+
+ return subGroup;
+ }
+
+ #endregion
+
+ #region [ Static ]
+
+ // Static Methods
+ private static Series GetSeriesInfo(Meter meter, Asset asset, string measurementTypeName, string phaseName, double samplesPerHour)
+ {
+ string measurementCharacteristicName = "Instantaneous";
+ string seriesTypeName = "Values";
+
+ char typeDesignation = (measurementTypeName == "Current") ? 'I' : measurementTypeName[0];
+ string phaseDesignation = (phaseName == "RES") ? "R" : phaseName.TrimEnd('N');
+ string channelName = string.Concat(typeDesignation, phaseDesignation);
+
+ ChannelKey channelKey = new ChannelKey(asset.ID, 0, channelName, measurementTypeName, measurementCharacteristicName, phaseName);
+ SeriesKey seriesKey = new SeriesKey(channelKey, seriesTypeName);
+
+ Channel dbChannel = (meter.ConnectionFactory is null)
+ ? meter.Channels.FirstOrDefault(channel => channelKey.Equals(new ChannelKey(channel)))
+ : FastSearch(meter, channelKey);
+
+ Series dbSeries = dbChannel?.Series
+ .FirstOrDefault(series => seriesKey.Equals(new SeriesKey(series)));
+
+ if (dbSeries is null)
+ {
+ if (dbChannel is null)
+ {
+ MeasurementType measurementType = new MeasurementType() { Name = measurementTypeName };
+ MeasurementCharacteristic measurementCharacteristic = new MeasurementCharacteristic() { Name = measurementCharacteristicName };
+ Phase phase = new Phase() { Name = phaseName };
+
+ dbChannel = new Channel()
+ {
+ MeterID = meter.ID,
+ AssetID = asset.ID,
+ MeasurementTypeID = measurementType.ID,
+ MeasurementCharacteristicID = measurementCharacteristic.ID,
+ PhaseID = phase.ID,
+ Name = channelKey.Name,
+ SamplesPerHour = samplesPerHour,
+ Description = string.Concat(measurementCharacteristicName, " ", measurementTypeName, " ", phaseName),
+ Enabled = true,
+
+ Meter = meter,
+ Asset = asset,
+ MeasurementType = measurementType,
+ MeasurementCharacteristic = measurementCharacteristic,
+ Phase = phase,
+ Series = new List<Series>()
+ };
+
+ meter.Channels.Add(dbChannel);
+ }
+
+ SeriesType seriesType = new SeriesType() { Name = seriesTypeName };
+
+ dbSeries = new Series()
+ {
+ ChannelID = dbChannel.ID,
+ SeriesTypeID = seriesType.ID,
+ SourceIndexes = string.Empty,
+
+ Channel = dbChannel,
+ SeriesType = seriesType
+ };
+
+ dbChannel.Series.Add(dbSeries);
+ }
+
+ return dbSeries;
+ }
+
+ private static Channel FastSearch(Meter meter, ChannelKey channelKey)
+ {
+ using (AdoDataConnection connection = meter.ConnectionFactory())
+ {
+ Channel search = channelKey.Find(connection, meter.ID);
+
+ if (search is null)
+ return null;
+
+ return meter.Channels
+ .FirstOrDefault(channel => channel.ID == search.ID);
+ }
+ }
+
+ #endregion
+ }
+}
diff --git a/src/Libraries/FaultData/FaultData.csproj b/src/Libraries/FaultData/FaultData.csproj
new file mode 100644
index 00000000..f4297a8d
--- /dev/null
+++ b/src/Libraries/FaultData/FaultData.csproj
@@ -0,0 +1,19 @@
+
+
+
+ net9.0
+ enable
+ enable
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/Libraries/PQDS/DataSeries.cs b/src/Libraries/PQDS/DataSeries.cs
new file mode 100644
index 00000000..5767e178
--- /dev/null
+++ b/src/Libraries/PQDS/DataSeries.cs
@@ -0,0 +1,94 @@
+//******************************************************************************************************
+// DataSeries.cs - Gbtc
+//
+// Copyright © 2020, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 03/06/2020 - Christoph Lackner
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace PQDS
+{
+ /// <summary>
+ /// Represents a channel in a PQDS File.
+ /// </summary>
+ public class DataSeries
+ {
+ #region[Properties]
+
+ private List<DataPoint> m_series;
+ private string m_label;
+
+ ///
+ /// A collection of DataPoints.
+ ///
+ public List<DataPoint> Series
+ {
+ get { return m_series; }
+ set { m_series = value; }
+ }
+
+ ///
+ /// Label of the
+ ///
+ public string Label { get { return m_label; } }
+
+ ///
+ /// length in number of points
+ ///
+ public int Length => m_series.Count();
+
+ #endregion[Properties]
+
+ ///
+ /// Creates a new .
+ ///
+ /// Label of the DataSeries
+ public DataSeries(string label)
+ {
+ m_label = label;
+ m_series = new List<DataPoint>();
+
+ }
+ #region[methods]
+
+ #endregion[methods]
+ }
+
+ ///
+ /// Represents a single Point in the .
+ ///
+ public class DataPoint
+ {
+ ///
+ /// Timestamp of the point.
+ ///
+ public DateTime Time;
+
+ ///
+ /// Value of the point.
+ ///
+ public double Value;
+ }
+
+
+}
diff --git a/src/Libraries/PQDS/MetaDataTag.cs b/src/Libraries/PQDS/MetaDataTag.cs
new file mode 100644
index 00000000..d08fc836
--- /dev/null
+++ b/src/Libraries/PQDS/MetaDataTag.cs
@@ -0,0 +1,421 @@
+//******************************************************************************************************
+// MetaDataTag.cs - Gbtc
+//
+// Copyright © 2020, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 03/06/2020 - Christoph Lackner
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System;
+using System.Collections.Generic;
+
+namespace PQDS
+{
+ ///
+ /// PQDS metadata tag Datatypes according to PQDS spec.
+ ///
+ public enum PQDSMetaDataType
+ {
+ ///
+ /// An integer representing a single value selected
+ /// from among a custom, finite set of possibilities
+ ///
+ Enumeration = 0,
+
+ ///
+ /// A number
+ ///
+ Numeric = 1,
+
+ ///
+ /// Text consisting only of alphabetical characters and digits
+ ///
+ AlphaNumeric = 2,
+
+ ///
+ /// Freeform text
+ ///
+ Text = 3,
+
+ ///
+ /// A Boolean value (true/false)
+ ///
+ Binary = 4
+
+ }
+
+ ///
+ /// Abstract Class of MetaData Tags for a .
+ ///
+ public abstract class MetaDataTag
+ {
+ #region[Properties]
+
+ ///
+ /// The key that identifies the metadata tag.
+ ///
+ protected string m_key;
+
+ ///
+ /// The unit of measurement.
+ ///
+ protected string m_unit;
+
+ ///
+ /// The data type the parser expects to encounter for the value of the metdata.
+ ///
+ protected PQDSMetaDataType m_expectedDataType;
+
+ ///
+ /// Additional notes about the metadata field.
+ ///
+ protected string m_note;
+
+ #endregion[Properties]
+
+ #region[Methods]
+
+ ///
+ /// the Metadata Tag key.
+ ///
+ public String Key { get { return (this.m_key); } }
+
+ ///
+ /// Converst the Metadata tag into a line of a PQDS file
+ ///
+ /// The metadataTag as a String
+ public abstract String Write();
+
+ ///
+ /// Returns the PQDS datatype
+ ///
+ /// The PQDS Datatype
+ public abstract PQDSMetaDataType Type();
+
+ #endregion[Methods]
+ }
+
+ ///
+ /// Class of MetaData Tags for a .
+ ///
+ public class MetaDataTag<DataType> : MetaDataTag
+ {
+ #region[Properties]
+
+ private DataType m_value;
+
+ ///
+ /// Value of the MetadataTag.
+ ///
+ public DataType Value { get { return m_value; } }
+
+ #endregion[Properties]
+
+ #region[Constructor]
+
+ ///
+ /// Creates a .
+ ///
+ /// key of the MetadataTag
+ /// Value of the MetadataTag
+ public MetaDataTag(String key, DataType value)
+ {
+ this.m_value = value;
+
+ this.m_key = key;
+ if (!keyToDataTypeLookup.TryGetValue(key, out this.m_expectedDataType))
+ this.m_expectedDataType = PQDSMetaDataType.Text;
+
+ if (!keyToUnitLookup.TryGetValue(key, out this.m_unit))
+ this.m_unit = null;
+
+ if (!keyToNoteLookup.TryGetValue(key, out this.m_note))
+ this.m_note = null;
+
+ //Check to ensure a string does not end up being a number etc...
+ if (this.m_expectedDataType == PQDSMetaDataType.AlphaNumeric)
+ {
+ if (!((value is string) | (value is Guid)))
+ { throw new InvalidCastException("Can not cast object to Alphanumeric Type"); }
+ }
+ else if (this.m_expectedDataType == PQDSMetaDataType.Numeric)
+ {
+ if (!((value is int) | (value is double)))
+ { throw new InvalidCastException("Can not cast object to Numeric Type"); }
+ }
+ else if (this.m_expectedDataType == PQDSMetaDataType.Enumeration)
+ {
+ if (!((value is int)))
+ { throw new InvalidCastException("Can not cast object to Numeric Type"); }
+ }
+ else if (this.m_expectedDataType == PQDSMetaDataType.Binary)
+ {
+ if (!((value is int) | (value is Boolean)))
+ { throw new InvalidCastException("Can not cast object to Numeric Type"); }
+ }
+
+ }
+
+ ///
+ /// Creates a custom .
+ ///
+ /// key of the MetadataTag
+ /// Value of the MetadataTag
+ /// The of the metadata tag
+ /// The unit of the metadata tag
+ /// a describtion of the metadata tag
+ public MetaDataTag(String key, DataType value, PQDSMetaDataType valueType, String unit, String description)
+ {
+ this.m_value = value;
+
+ this.m_key = key;
+ this.m_expectedDataType = valueType;
+
+ if (unit.Trim('"') == "") { this.m_unit = null; }
+ else { this.m_unit = unit.Trim('"'); }
+
+ if (description.Trim('"') == "") { this.m_note = null; }
+ else { this.m_note = description.Trim('"'); }
+
+ }
+
+ #endregion[Constructor]
+
+ #region[Methods]
+
+ ///
+ /// Converst the Metadata tag into a line of a PQDS file
+ ///
+ /// The metadataTag as a String
+ public override string Write()
+ {
+ string result = String.Format("{0},\"{1}\",{2},{3},\"{4}\"",
+ this.m_key, this.m_value, this.m_unit, DataTypeToCSV(this.m_expectedDataType), this.m_note);
+
+ return result;
+ }
+
+ ///
+ /// Returns the PQDS datatype
+ ///
+ /// The PQDS Datatype
+ public override PQDSMetaDataType Type()
+ {
+ return this.m_expectedDataType;
+ }
+
+ #endregion[Methods]
+
+ #region[Statics]
+
+ private static readonly Dictionary<string, PQDSMetaDataType> keyToDataTypeLookup = new Dictionary<string, PQDSMetaDataType>()
+ {
+ {"DeviceName", PQDSMetaDataType.Text },
+ {"DeviceAlias", PQDSMetaDataType.Text },
+ {"DeviceLocation", PQDSMetaDataType.Text },
+ {"DeviceLocationAlias", PQDSMetaDataType.Text },
+ {"DeviceLatitude", PQDSMetaDataType.Text },
+ {"DeviceLongitude", PQDSMetaDataType.Text },
+ {"Accountname", PQDSMetaDataType.Text },
+ {"AccountNameAlias", PQDSMetaDataType.Text },
+ {"DeviceDistanceToXFMR", PQDSMetaDataType.Numeric },
+ {"DeviceConnectionTypeCode", PQDSMetaDataType.Enumeration },
+ {"DeviceOwner", PQDSMetaDataType.Text },
+ {"NominalVoltage-LG", PQDSMetaDataType.Numeric },
+ {"NominalFrequency", PQDSMetaDataType.Numeric },
+ {"UpstreamXFMR-kVA", PQDSMetaDataType.Numeric },
+ {"LineLength", PQDSMetaDataType.Numeric },
+ {"AssetName", PQDSMetaDataType.Text },
+ {"EventGUID", PQDSMetaDataType.AlphaNumeric },
+ {"EventID", PQDSMetaDataType.Text },
+ {"EventYear", PQDSMetaDataType.Enumeration },
+ {"EventMonth", PQDSMetaDataType.Enumeration },
+ {"EventDay", PQDSMetaDataType.Enumeration },
+ {"EventHour", PQDSMetaDataType.Enumeration },
+ {"EventMinute", PQDSMetaDataType.Enumeration },
+ {"EventSecond", PQDSMetaDataType.Enumeration },
+ {"EventNanoSecond", PQDSMetaDataType.Numeric },
+ {"EventDate", PQDSMetaDataType.Text },
+ {"EventTime", PQDSMetaDataType.Text },
+ {"EventTypeCode", PQDSMetaDataType.Enumeration },
+ {"EventFaultTypeCode", PQDSMetaDataType.Enumeration },
+ {"EventPeakCurrent", PQDSMetaDataType.Numeric },
+ {"EventPeakVoltage", PQDSMetaDataType.Numeric },
+ {"EventMaxVA", PQDSMetaDataType.Numeric },
+ {"EventMaxVB", PQDSMetaDataType.Numeric },
+ {"EventMaxVC", PQDSMetaDataType.Numeric },
+ {"EventMinVA", PQDSMetaDataType.Numeric },
+ {"EventMinVB", PQDSMetaDataType.Numeric },
+ {"EventMinVC", PQDSMetaDataType.Numeric },
+ {"EventMaxIA", PQDSMetaDataType.Numeric },
+ {"EventMaxIB", PQDSMetaDataType.Numeric },
+ {"EventMaxIC", PQDSMetaDataType.Numeric },
+ {"EventPreEventCurrent", PQDSMetaDataType.Numeric },
+ {"EventPreEventVoltage", PQDSMetaDataType.Numeric },
+ {"EventDuration", PQDSMetaDataType.Numeric },
+ {"EventFaultI2T", PQDSMetaDataType.Numeric },
+ {"DistanceToFault", PQDSMetaDataType.Numeric },
+ {"EventCauseCode", PQDSMetaDataType.Enumeration },
+ {"WaveformDataType", PQDSMetaDataType.Enumeration },
+ {"WaveFormSensitivityCode", PQDSMetaDataType.Enumeration },
+ {"WaveFormSensitivityNote", PQDSMetaDataType.Text },
+ {"Utility", PQDSMetaDataType.Text },
+ {"ContactEmail", PQDSMetaDataType.Text }
+ };
+
+ // Engineering unit written to the "unit" column for each known tag key;
+ // null means the tag is dimensionless (names, codes, date/time components).
+ private static readonly Dictionary keyToUnitLookup = new Dictionary()
+ {
+ {"DeviceName", null },
+ {"DeviceAlias", null },
+ {"DeviceLocation", null },
+ {"DeviceLocationAlias", null },
+ {"DeviceLatitude", null },
+ {"DeviceLongitude", null },
+ {"Accountname", null },
+ {"AccountNameAlias", null },
+ {"DeviceDistanceToXFMR", "feet" },
+ {"DeviceConnectionTypeCode", null },
+ {"DeviceOwner", null },
+ {"NominalVoltage-LG", "Volts" },
+ {"NominalFrequency", "Hz" },
+ {"UpstreamXFMR-kVA", "kVA" },
+ {"LineLength", "miles" },
+ {"AssetName", null },
+ {"EventGUID", null },
+ {"EventID", null },
+ {"EventYear", null },
+ {"EventMonth", null },
+ {"EventDay", null },
+ {"EventHour", null },
+ {"EventMinute", null },
+ {"EventSecond", null },
+ {"EventNanoSecond", null },
+ {"EventDate", null },
+ {"EventTime", null },
+ {"EventTypeCode", null },
+ {"EventFaultTypeCode", null },
+ {"EventPeakCurrent", "Amps" },
+ {"EventPeakVoltage", "Volts" },
+ {"EventMaxVA", "Volts" },
+ {"EventMaxVB", "Volts" },
+ {"EventMaxVC", "Volts" },
+ {"EventMinVA", "Volts" },
+ {"EventMinVB", "Volts" },
+ {"EventMinVC", "Volts" },
+ {"EventMaxIA", "Amps" },
+ {"EventMaxIB", "Amps" },
+ {"EventMaxIC", "Amps" },
+ {"EventPreEventCurrent", "Amps" },
+ {"EventPreEventVoltage", "Volts" },
+ {"EventDuration", "ms" },
+ {"EventFaultI2T", "A2s" },
+ {"DistanceToFault", "miles" },
+ {"EventCauseCode", null },
+ {"WaveformDataType", null },
+ {"WaveFormSensitivityCode", null },
+ {"WaveFormSensitivityNote", null },
+ {"Utility", null },
+ {"ContactEmail", null }
+ };
+
+ // Human-readable description written to the "note" column for each known tag
+ // key. Spelling corrected ("measurement", "Longitude", "Nanoseconds",
+ // "Pre Event Voltage"); the keys themselves are unchanged because they
+ // identify the tags in the file format.
+ private static readonly Dictionary keyToNoteLookup = new Dictionary()
+ {
+ {"DeviceName", "Meter or measurement device name" },
+ {"DeviceAlias", "Alternate meter or measurement device name" },
+ {"DeviceLocation", "Meter or measurement device location name" },
+ {"DeviceLocationAlias", "Alternate meter or device location name" },
+ {"DeviceLatitude", "Latitude" },
+ {"DeviceLongitude", "Longitude" },
+ {"Accountname", "Name of customer or account" },
+ {"AccountNameAlias", "Alternate name of customer or account" },
+ {"DeviceDistanceToXFMR", "Distance to the upstream transformer" },
+ {"DeviceConnectionTypeCode", "PQDS code for meter connection type" },
+ {"DeviceOwner", "Utility name" },
+ {"NominalVoltage-LG", "Nominal Line to Ground Voltage" },
+ {"NominalFrequency", "Nominal System frequency" },
+ {"UpstreamXFMR-kVA", "Upstream Transformer size" },
+ {"LineLength", "Length of the Line" },
+ {"AssetName", "Asset name" },
+ {"EventGUID", "Globally Unique Event Identifier" },
+ {"EventID", "A user defined Event Name" },
+ {"EventYear", "Year" },
+ {"EventMonth", "Month" },
+ {"EventDay", "Day" },
+ {"EventHour", "Hour" },
+ {"EventMinute", "Minute" },
+ {"EventSecond", "Second" },
+ {"EventNanoSecond", "Nanoseconds" },
+ {"EventDate", "Event Date" },
+ {"EventTime", "Event Time" },
+ {"EventTypeCode", "PQDS Event Type Code" },
+ {"EventFaultTypeCode", "PQDS Fault Type Code" },
+ {"EventPeakCurrent", "Peak Current" },
+ {"EventPeakVoltage", "Peak Voltage" },
+ {"EventMaxVA", "RMS Maximum A Phase Voltage" },
+ {"EventMaxVB", "RMS Maximum B Phase Voltage" },
+ {"EventMaxVC", "RMS Maximum C Phase Voltage" },
+ {"EventMinVA", "RMS Minimum A Phase Voltage" },
+ {"EventMinVB", "RMS Minimum B Phase Voltage" },
+ {"EventMinVC", "RMS Minimum C Phase Voltage" },
+ {"EventMaxIA", "RMS Maximum A Phase Current" },
+ {"EventMaxIB", "RMS Maximum B Phase Current" },
+ {"EventMaxIC", "RMS Maximum C Phase Current" },
+ {"EventPreEventCurrent", "Pre Event Current" },
+ {"EventPreEventVoltage", "Pre Event Voltage" },
+ {"EventDuration", "Event Duration" },
+ {"EventFaultI2T", "I2(t) during Fault duration" },
+ {"DistanceToFault", "Distance to Fault" },
+ {"EventCauseCode", "PQDS Event Cause Code" },
+ {"WaveformDataType", "PQDS Data Type Code" },
+ {"WaveFormSensitivityCode", "PQDS Data Sensitivity Code" },
+ {"WaveFormSensitivityNote", "Notes on the PQDS Data Sensitivity Code" },
+ {"Utility", "Utility that Generated this Dataset" },
+ {"ContactEmail", "Contact for Utility that Created this Dataset" }
+ };
+
+ // Translates a PQDSMetaDataType into the single-character code used in the
+ // PQDS CSV meta data section: "T" text, "N" numeric, "E" enumeration,
+ // "A" alphanumeric, "B" binary. Unrecognized values fall back to "T".
+ private static string DataTypeToCSV(PQDSMetaDataType dataType)
+ {
+     if (dataType == PQDSMetaDataType.Numeric)
+         return "N";
+
+     if (dataType == PQDSMetaDataType.Enumeration)
+         return "E";
+
+     if (dataType == PQDSMetaDataType.AlphaNumeric)
+         return "A";
+
+     if (dataType == PQDSMetaDataType.Binary)
+         return "B";
+
+     // PQDSMetaDataType.Text and anything unrecognized are written as text.
+     return "T";
+ }
+
+
+ #endregion[Statics]
+
+
+
+
+
+
+ }
+
+
+}
diff --git a/src/Libraries/PQDS/PQDS.csproj b/src/Libraries/PQDS/PQDS.csproj
new file mode 100644
index 00000000..c29cf134
--- /dev/null
+++ b/src/Libraries/PQDS/PQDS.csproj
@@ -0,0 +1,23 @@
+
+
+ netstandard2.0
+ Library
+ PQDS
+ PQDS
+ Copyright © 2020
+ 3.0.5.69
+ 3.0.5.69
+
+
+ ..\..\..\Build\Output\Debug\Libraries\
+ ..\..\..\Build\Output\Debug\Libraries\PQDS.xml
+
+
+ ..\..\..\Build\Output\Release\Libraries\
+ ..\..\..\Build\Output\Release\Libraries\PQDS.xml
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/Libraries/PQDS/PQDSFile.cs b/src/Libraries/PQDS/PQDSFile.cs
new file mode 100644
index 00000000..17f3cc52
--- /dev/null
+++ b/src/Libraries/PQDS/PQDSFile.cs
@@ -0,0 +1,541 @@
+//******************************************************************************************************
+// PQDSFile.cs - Gbtc
+//
+// Copyright © 2020, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the Eclipse Public License -v 1.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://www.opensource.org/licenses/eclipse-1.0.php
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 03/06/2020 - Christoph Lackner
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
+using System.Linq;
+
+namespace PQDS
+{
+ ///
+ /// Class that represents a PQDS file.
+ ///
+ public class PQDSFile
+ {
+ #region[Properties]
+
+ private List m_metaData;
+ private List m_Data;
+ private DateTime m_initialTS;
+
+ #endregion[Properties]
+
+ #region[Constructors]
+ /// <summary>
+ /// Creates a new PQDS file from existing data.
+ /// </summary>
+ /// <param name="metaData">List of meta data tags to be included in the PQDS file; null is treated as empty.</param>
+ /// <param name="dataSeries">Measurement data to be included.</param>
+ /// <param name="initialTimeStamp">Timestamp used as the beginning of the PQDS file.</param>
+ public PQDSFile(List metaData, List dataSeries, DateTime initialTimeStamp)
+ {
+     // A null meta data collection is normalized to an empty list.
+     m_metaData = metaData ?? new List();
+     m_initialTS = initialTimeStamp;
+     m_Data = dataSeries;
+ }
+
+ /// <summary>
+ /// Creates a new, empty PQDS file with no meta data and no data series.
+ /// </summary>
+ public PQDSFile()
+ {
+     m_metaData = new List();
+     m_Data = new List();
+ }
+
+ #endregion[Constructors]
+
+ #region[Methods]
+
+ // Derives the file's initial timestamp (m_initialTS) from parsed meta data.
+ // "eventdate" ("MM/dd/yyyy") and "eventtime" ("HH:mm:ss") take precedence;
+ // any component they do not supply falls back to the individual event*
+ // component tags, and finally to the current local time for components that
+ // are missing entirely.
+ // NOTE(review): the event* component tags are assumed to carry integer
+ // values (their .Value is assigned to int? locals) -- confirm against the
+ // MetaDataTag parsing in CreateMetaData.
+ private void GetStartTime()
+ {
+ DateTime result;
+ int? day = null;
+ int? month = null;
+ int? year = null;
+
+ // Date: prefer the combined "eventdate" tag.
+ if (this.m_metaData.Select(item => item.Key).Contains("eventdate"))
+ {
+ string val = ((MetaDataTag)this.m_metaData.Find(item => item.Key == "eventdate")).Value;
+ if (DateTime.TryParseExact(val, "MM/dd/yyyy", CultureInfo.InvariantCulture, DateTimeStyles.None, out result))
+ {
+ day = result.Day;
+ month = result.Month;
+ year = result.Year;
+ }
+ }
+ if (day is null)
+ {
+ if (this.m_metaData.Select(item => item.Key).Contains("eventday"))
+ {
+ day = ((MetaDataTag)this.m_metaData.Find(item => item.Key == "eventday")).Value;
+ }
+ else
+ {
+ day = DateTime.Now.Day;
+ }
+ }
+ if (month is null)
+ {
+ if (this.m_metaData.Select(item => item.Key).Contains("eventmonth"))
+ {
+ month = ((MetaDataTag)this.m_metaData.Find(item => item.Key == "eventmonth")).Value;
+ }
+ else
+ {
+ month = DateTime.Now.Month;
+ }
+ }
+ if (year is null)
+ {
+ if (this.m_metaData.Select(item => item.Key).Contains("eventyear"))
+ {
+ year = ((MetaDataTag)this.m_metaData.Find(item => item.Key == "eventyear")).Value;
+ }
+ else
+ {
+ year = DateTime.Now.Year;
+ }
+ }
+
+ // Time of day: prefer the combined "eventtime" tag.
+ int? hour = null;
+ int? minute = null;
+ int? second = null;
+
+ if (this.m_metaData.Select(item => item.Key).Contains("eventtime"))
+ {
+ string val = ((MetaDataTag)this.m_metaData.Find(item => item.Key == "eventtime")).Value;
+ if (DateTime.TryParseExact(val, "HH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.None, out result))
+ {
+ hour = result.Hour;
+ minute = result.Minute;
+ second = result.Second;
+ }
+ }
+ if (hour is null)
+ {
+ if (this.m_metaData.Select(item => item.Key).Contains("eventhour"))
+ {
+ hour = ((MetaDataTag)this.m_metaData.Find(item => item.Key == "eventhour")).Value;
+ }
+ else
+ {
+ hour = DateTime.Now.Hour;
+ }
+ }
+ if (minute is null)
+ {
+ if (this.m_metaData.Select(item => item.Key).Contains("eventminute"))
+ {
+ minute = ((MetaDataTag)this.m_metaData.Find(item => item.Key == "eventminute")).Value;
+ }
+ else
+ {
+ minute = DateTime.Now.Minute;
+ }
+ }
+ if (second is null)
+ {
+ if (this.m_metaData.Select(item => item.Key).Contains("eventsecond"))
+ {
+ second = ((MetaDataTag)this.m_metaData.Find(item => item.Key == "eventsecond")).Value;
+ }
+ else
+ {
+ second = DateTime.Now.Second;
+ }
+ }
+
+
+ result = new DateTime((int)year, (int)month, (int)day, (int)hour, (int)minute, (int)second);
+
+ this.m_initialTS = result;
+ }
+
+ // Builds a typed MetaDataTag from one meta data CSV row. Field layout:
+ // flds[0]=key (lower-cased), flds[1]=value, flds[2]=unit, flds[3]=type code
+ // (N/E/B/A, anything else means text), flds[4]=note. The value is converted
+ // according to the type code; quotes around fields are stripped.
+ private MetaDataTag CreateMetaData(string[] flds)
+ {
+
+ string dataTypeString = flds[3].Trim().ToUpper();
+ PQDSMetaDataType dataType;
+
+ switch (dataTypeString)
+ {
+ case "N":
+ {
+ dataType = PQDSMetaDataType.Numeric;
+ break;
+ }
+ case "E":
+ {
+ dataType = PQDSMetaDataType.Enumeration;
+ break;
+ }
+ case "B":
+ {
+ dataType = PQDSMetaDataType.Binary;
+ break;
+ }
+ case "A":
+ {
+ dataType = PQDSMetaDataType.AlphaNumeric;
+ break;
+ }
+ default:
+ {
+ dataType = PQDSMetaDataType.Text;
+ break;
+ }
+ }
+
+ string key = flds[0].Trim().ToLower();
+ string note = flds[4].Trim('"');
+ string unit = flds[2].Trim('"');
+
+ // NOTE(review): Convert.ToInt32/ToDouble use the current culture --
+ // confirm PQDS files always use '.' as the decimal separator.
+ switch (dataType)
+ {
+ case (PQDSMetaDataType.AlphaNumeric):
+ {
+ string value = flds[1].Trim('"');
+ return new MetaDataTag(key, value, dataType, unit, note);
+ }
+ case (PQDSMetaDataType.Text):
+ {
+ string value = flds[1].Trim('"');
+ return new MetaDataTag(key, value, dataType, unit, note);
+ }
+ case (PQDSMetaDataType.Enumeration):
+ {
+ int value = Convert.ToInt32(flds[1].Trim('"'));
+ return new MetaDataTag(key, value, dataType, unit, note);
+ }
+ case (PQDSMetaDataType.Numeric):
+ {
+ double value = Convert.ToDouble(flds[1].Trim('"'));
+ return new MetaDataTag(key, value, dataType, unit, note);
+ }
+ case (PQDSMetaDataType.Binary):
+ {
+ Boolean value = Convert.ToBoolean(flds[1].Trim('"'));
+ return new MetaDataTag(key, value, dataType, unit, note);
+ }
+ default:
+ {
+ string value = flds[1].Trim('"');
+ return new MetaDataTag(key, value, dataType, unit, note);
+ }
+ }
+
+
+ }
+
+ // Returns true when the given CSV line is the "waveform-data" header row
+ // that separates the meta data section from the data section.
+ private Boolean IsDataHeader(string line)
+ {
+     // A header row is comma-separated; anything else cannot be it.
+     if (!line.Contains(","))
+         return false;
+
+     return line.Split(',')[0].ToLower().Trim() == "waveform-data";
+ }
+
+ /// <summary>
+ /// List of included meta data tags.
+ /// </summary>
+ public List MetaData => m_metaData;
+
+ /// <summary>
+ /// List of data series included in the PQDS file.
+ /// </summary>
+ public List Data => m_Data;
+
+ /// <summary>
+ /// Writes the content (meta data rows followed by the waveform data section)
+ /// as CSV to the given stream.
+ /// </summary>
+ /// <param name="stream">The stream to write the data to.</param>
+ /// <param name="progress">Progress Token</param>
+ public void WriteToStream(StreamWriter stream, IProgress progress)
+ {
+ int n_data = this.Data.Select((item) => item.Length).Max();
+ // Total work units: DataLines reports the first n_data of these itself,
+ // and the write loop below reports the remainder, offset by n_data.
+ int n_total = n_data + n_data + this.m_metaData.Count() + 1;
+
+ //create the metadata header
+ List lines = new List();
+ lines = this.m_metaData.Select(item => item.Write()).ToList();
+
+ lines.AddRange(DataLines(n_total, progress));
+
+ for (int i = 0; i < lines.Count(); i++)
+ {
+ stream.WriteLine(lines[i]);
+ progress.Report((double)(n_data + i) / n_total);
+ }
+
+
+ }
+
+ /// <summary>
+ /// Writes the content to a .csv file, replacing any existing file.
+ /// </summary>
+ /// <param name="file">file name</param>
+ /// <param name="progress">Progress Token</param>
+ public void WriteToFile(string file, IProgress progress)
+ {
+     // File.Create truncates an existing file. The previous File.OpenWrite did
+     // not, so overwriting a longer pre-existing file left stale bytes of the
+     // old content dangling after the end of the newly written CSV.
+     using (StreamWriter fileWriter = new StreamWriter(File.Create(file)))
+     {
+         WriteToStream(fileWriter, progress);
+     }
+ }
+ /// <summary>
+ /// Writes the content to a .csv file without reporting progress.
+ /// </summary>
+ /// <param name="file">file name</param>
+ public void WriteToFile(string file)
+ {
+     // Delegate with a throw-away progress sink.
+     WriteToFile(file, new Progress());
+ }
+
+ /// <summary>
+ /// Writes the content to an output stream without reporting progress.
+ /// </summary>
+ /// <param name="stream">The stream to write the data to.</param>
+ public void WriteToStream(StreamWriter stream)
+ {
+     // Delegate with a throw-away progress sink.
+     WriteToStream(stream, new Progress());
+ }
+
+
+
+ /// <summary>
+ /// Reads the content from a PQDS file without reporting progress.
+ /// </summary>
+ /// <param name="filename">file name</param>
+ public void ReadFromFile(string filename)
+ {
+     // Delegate with a throw-away progress sink.
+     ReadFromFile(filename, new Progress());
+ }
+
+
+ /// <summary>
+ /// Reads the content from a PQDS file: a meta data section (key, value, unit,
+ /// type, note rows), a "waveform-data" header row naming the signals, then
+ /// one row per sample (time column followed by one value per signal).
+ /// </summary>
+ /// <param name="filename">file name</param>
+ /// <param name="progress">Progress Token</param>
+ public void ReadFromFile(string filename, IProgress progress)
+ {
+ List lines = new List();
+ // Open the file and read each line
+ using (StreamReader fileReader = new StreamReader(File.OpenRead(filename)))
+ {
+ while (!fileReader.EndOfStream)
+ {
+ lines.Add(fileReader.ReadLine().Trim());
+ }
+ }
+
+ int index = 0;
+ String[] flds;
+ // Parse MetaData Section
+ this.m_metaData = new List();
+
+ // Rows without a comma or with fewer than 5 fields are silently skipped.
+ while (!(IsDataHeader(lines[index])))
+ {
+ if (!lines[index].Contains(","))
+ {
+ index++;
+ continue;
+ }
+
+ flds = lines[index].Split(',');
+
+ if (flds.Count() < 5)
+ {
+ index++;
+ continue;
+ }
+ this.m_metaData.Add(CreateMetaData(flds));
+ index++;
+
+ // Running off the end without ever seeing the data header is fatal.
+ if (index == lines.Count())
+ { throw new InvalidDataException("PQDS File not valid"); }
+ progress.Report((double)index / (double)lines.Count());
+ }
+
+ //Parse Data Header
+ flds = lines[index].Split(',');
+
+ if (flds.Count() < 2)
+ {
+ throw new InvalidDataException("PQDS File has invalid data section or no data");
+ }
+
+ this.m_Data = new List();
+ List signals = new List();
+ List> data = new List>();
+
+
+ // Duplicate column labels are skipped, so signals, m_Data and data are
+ // built strictly in parallel (same index identifies the same signal).
+ for (int i = 1; i < flds.Count(); i++)
+ {
+ if (signals.Contains(flds[i].Trim().ToLower()))
+ {
+ continue;
+ }
+ this.m_Data.Add(new DataSeries(flds[i].Trim().ToLower()));
+ signals.Add(flds[i].Trim().ToLower());
+ data.Add(new List());
+ }
+
+ index++;
+ //Parse Data
+ GetStartTime();
+
+ while (index < lines.Count())
+ {
+ if (!lines[index].Contains(","))
+ {
+ index++;
+ continue;
+ }
+
+ flds = lines[index].Split(',');
+
+ // Rows whose field count does not match the declared signals are skipped.
+ if (flds.Count() != (this.m_Data.Count() + 1))
+ {
+ index++;
+ continue;
+ }
+ DateTime TS;
+ try
+ {
+ // NOTE(review): DataLines writes this column as TotalMilliseconds, but
+ // ticks * 100 interprets the value as units of 10 us (1 ms = 10,000
+ // TimeSpan ticks), so read and write disagree by a factor of 100 --
+ // confirm the intended unit against the PQDS specification.
+ double ticks = Convert.ToDouble(flds[0].Trim());
+ TS = this.m_initialTS + new TimeSpan((Int64)(ticks * 100));
+ }
+ catch
+ {
+ // An unparsable time column invalidates the whole row.
+ index++;
+ continue;
+ }
+
+ for (int i = 0; i < signals.Count(); i++)
+ {
+ try
+ {
+ double value = Convert.ToDouble(flds[i + 1].Trim());
+ data[i].Add(new DataPoint() { Time = TS, Value = value });
+ }
+ catch
+ {
+ // Unparsable cells (e.g. padded "NaN") are dropped for that series only.
+ continue;
+ }
+ }
+
+ progress.Report((double)index / (double)lines.Count());
+ index++;
+ }
+
+ // NOTE(review): data[j] (rather than data[i]) is safe only because m_Data
+ // and signals were built in parallel above, making j == i; data[i] would
+ // be clearer.
+ for (int i = 0; i < signals.Count(); i++)
+ {
+ int j = this.m_Data.FindIndex(item => item.Label == signals[i]);
+ this.m_Data[j].Series = data[j];
+ }
+ }
+
+ // Renders the waveform data section: the "waveform-data" header row followed
+ // by one CSV row per sample of the densest series (time in milliseconds from
+ // m_initialTS, then one value per series). Series with a lower sampling rate
+ // are padded with "NaN" on samples they do not cover; rates must be integer
+ // divisors of the maximum rate. Reports i/n_total progress per emitted row.
+ private List DataLines(int n_total, IProgress progress)
+ {
+ List result = new List();
+
+ //ensure they all start at the same Time
+ List measurements = this.m_Data.Select(item => item.Label).ToList();
+ DateTime initalStart = this.m_Data.Select(item => item.Series[0].Time).Min();
+ List startTime = this.m_Data.Select(item => item.Series[0].Time - initalStart).ToList();
+
+ //1 ms difference is ok
+ if (startTime.Max().TotalMilliseconds > 1)
+ {
+ throw new Exception("The measurements start at different times");
+ }
+
+ //write the header
+ result.Add("waveform-data," + String.Join(",", measurements));
+
+
+ //write the Data
+ // Logic for skipping datapoints if they don't have the same sampling rate
+ List samplingRates = m_Data.Select(item => item.Length).Distinct().ToList();
+
+ int n_data = samplingRates.Max();
+
+ Dictionary> reSampling = new Dictionary>();
+
+ if (samplingRates.Any(f => ((double)n_data / (double)f) % 1 != 0))
+ throw new Exception("Sampling Rates in this File do not match and are not multiples of each other.");
+
+ // Per length, a resampler that returns the series value on every n-th
+ // output sample and NaN in between (n = n_data / series length).
+ reSampling = samplingRates.Select(item => new KeyValuePair>(item, (int index, DataSeries ds) => {
+ int n = n_data / item;
+ if (index % n == 0)
+ return ds.Series[index / n].Value;
+ else
+ return double.NaN;
+ }))
+ .ToDictionary(item => item.Key, item => item.Value);
+
+ // NOTE(review): the time column is taken from m_Data[0], which is indexed
+ // up to n_data - 1; this throws if the first series is not one of the
+ // longest -- confirm callers guarantee that ordering.
+ for (int i = 0; i < n_data; i++)
+ {
+ TimeSpan dT = m_Data[0].Series[i].Time - m_initialTS;
+ result.Add(Convert.ToString(dT.TotalMilliseconds) + "," +
+ String.Join(",", m_Data.Select(item => {
+ double v = reSampling[item.Length](i, item);
+ if (double.IsNaN(v))
+ return "NaN".PadLeft(12);
+ return String.Format("{0:F12}", v);
+ }).ToList()));
+ progress.Report((double)i / (double)n_total);
+ }
+
+ return result;
+
+ }
+
+ #endregion[Methods]
+
+ }
+
+
+}
diff --git a/src/Libraries/PQDS/Properties/AssemblyInfo.cs b/src/Libraries/PQDS/Properties/AssemblyInfo.cs
new file mode 100644
index 00000000..b6c46133
--- /dev/null
+++ b/src/Libraries/PQDS/Properties/AssemblyInfo.cs
@@ -0,0 +1,13 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components. If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("c6e64ba2-dca7-4a34-973a-2306a1f9effc")]
diff --git a/src/Libraries/openXDA.Model/Channels/Channel.cs b/src/Libraries/openXDA.Model/Channels/Channel.cs
new file mode 100644
index 00000000..491748e4
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Channels/Channel.cs
@@ -0,0 +1,580 @@
+//******************************************************************************************************
+// Channel.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System.ComponentModel;
+using System.ComponentModel.DataAnnotations;
+using System.Data;
+using System.Transactions;
+using Gemstone.Data;
+using Gemstone.Data.Model;
+using Newtonsoft.Json;
+using IsolationLevel = System.Transactions.IsolationLevel;
+
+namespace openXDA.Model
+{
+ // Value object identifying a channel by asset, harmonic group, name,
+ // measurement type, measurement characteristic and phase -- everything
+ // except the owning meter, which is supplied separately to Find(). Equality
+ // and hashing are case-insensitive on the string components.
+ public class ChannelKey : IEquatable
+ {
+ #region [ Constructors ]
+
+ public ChannelKey(int assetID, int harmonicGroup, string name, string measurementType, string measurementCharacteristic, string phase)
+ {
+ LineID = assetID;
+ HarmonicGroup = harmonicGroup;
+ Name = name;
+ MeasurementType = measurementType;
+ MeasurementCharacteristic = measurementCharacteristic;
+ Phase = phase;
+ }
+
+ // Extracts the key from a fully loaded channel record (navigation
+ // properties MeasurementType/MeasurementCharacteristic/Phase must resolve).
+ public ChannelKey(Channel channel)
+ : this(channel.AssetID, channel.HarmonicGroup, channel.Name, channel.MeasurementType.Name, channel.MeasurementCharacteristic.Name, channel.Phase.Name)
+ {
+ }
+
+ #endregion
+
+ #region [ Properties ]
+
+ public int LineID { get; }
+ public int HarmonicGroup { get; }
+ public string Name { get; }
+ public string MeasurementType { get; }
+ public string MeasurementCharacteristic { get; }
+ public string Phase { get; }
+
+ #endregion
+
+ #region [ Methods ]
+
+ // Looks up the channel matching this key for the given meter;
+ // returns null when no such channel exists.
+ public Channel Find(AdoDataConnection connection, int meterID)
+ {
+ const string QueryFormat =
+ "SELECT Channel.* " +
+ "FROM " +
+ " Channel JOIN " +
+ " MeasurementType ON Channel.MeasurementTypeID = MeasurementType.ID JOIN " +
+ " MeasurementCharacteristic ON Channel.MeasurementCharacteristicID = MeasurementCharacteristic.ID JOIN " +
+ " Phase ON Channel.PhaseID = Phase.ID " +
+ "WHERE " +
+ " Channel.MeterID = {0} AND " +
+ " Channel.AssetID = {1} AND " +
+ " Channel.HarmonicGroup = {2} AND " +
+ " Channel.Name = {3} AND " +
+ " MeasurementType.Name = {4} AND " +
+ " MeasurementCharacteristic.Name = {5} AND " +
+ " Phase.Name = {6}";
+
+ object[] parameters =
+ {
+ meterID,
+ LineID,
+ HarmonicGroup,
+ Name,
+ MeasurementType,
+ MeasurementCharacteristic,
+ Phase
+ };
+
+ using (DataTable table = connection.RetrieveData(QueryFormat, parameters))
+ {
+ if (table.Rows.Count == 0)
+ return null;
+
+ TableOperations channelTable = new TableOperations(connection);
+ return channelTable.LoadRecord(table.Rows[0]);
+ }
+ }
+
+ // Combines all six key components; string parts hashed case-insensitively
+ // to stay consistent with Equals below.
+ public override int GetHashCode()
+ {
+ StringComparer stringComparer = StringComparer.OrdinalIgnoreCase;
+
+ int hash = 1009;
+ hash = 9176 * hash + LineID.GetHashCode();
+ hash = 9176 * hash + HarmonicGroup.GetHashCode();
+ hash = 9176 * hash + stringComparer.GetHashCode(Name);
+ hash = 9176 * hash + stringComparer.GetHashCode(MeasurementType);
+ hash = 9176 * hash + stringComparer.GetHashCode(MeasurementCharacteristic);
+ hash = 9176 * hash + stringComparer.GetHashCode(Phase);
+ return hash;
+ }
+
+ public override bool Equals(object obj)
+ {
+ return Equals(obj as ChannelKey);
+ }
+
+ public bool Equals(ChannelKey other)
+ {
+ if (other is null)
+ return false;
+
+ StringComparison stringComparison = StringComparison.OrdinalIgnoreCase;
+
+ return
+ LineID.Equals(other.LineID) &&
+ HarmonicGroup.Equals(other.HarmonicGroup) &&
+ Name.Equals(other.Name, stringComparison) &&
+ MeasurementType.Equals(other.MeasurementType, stringComparison) &&
+ MeasurementCharacteristic.Equals(other.MeasurementCharacteristic, stringComparison) &&
+ Phase.Equals(other.Phase, stringComparison);
+ }
+
+ #endregion
+ }
+
+ // Plain record type mapped to the Channel table; carries only the raw
+ // columns. Channel derives from this and layers lazily loaded navigation
+ // properties on top.
+ [TableName("Channel")]
+ public class ChannelBase
+ {
+ [PrimaryKey(true)]
+ public int ID { get; set; }
+
+ [ParentKey(typeof(Meter))]
+ public int MeterID { get; set; }
+
+ public int AssetID { get; set; }
+
+ public int MeasurementTypeID { get; set; }
+
+ public int MeasurementCharacteristicID { get; set; }
+
+ public int PhaseID { get; set; }
+
+ [StringLength(200)]
+ public string Name { get; set; }
+
+ // Linear scaling applied to raw values: value * Multiplier + Adder.
+ // NOTE(review): the scaling use is not visible in this file -- confirm.
+ public double Adder { get; set; }
+
+ [DefaultValue(1.0D)]
+ public double Multiplier { get; set; } = 1.0D;
+
+ public double SamplesPerHour { get; set; }
+
+ public double? PerUnitValue { get; set; }
+
+ public int HarmonicGroup { get; set; }
+
+ public string Description { get; set; }
+
+ public bool Enabled { get; set; }
+
+ [DefaultValue(false)]
+ public bool Trend { get; set; }
+
+ [DefaultValue(0)]
+ public int ConnectionPriority { get; set; } = 0;
+ }
+
+ // Channel record with lazily resolved navigation properties. Each property
+ // first consults the shared LazyContext cache and only falls back to a
+ // database query (through ConnectionFactory) on a cache miss; the Query*
+ // helpers register fetched records back into the LazyContext so related
+ // entities share instances across the object graph.
+ public class Channel : ChannelBase
+ {
+ #region [ Members ]
+
+ // Fields
+ private MeasurementType m_measurementType;
+ private MeasurementCharacteristic m_measurementCharacteristic;
+ private Phase m_phase;
+ private Meter m_meter;
+ private Asset m_asset;
+ private List m_series;
+
+ #endregion
+
+ #region [ Properties ]
+
+ [JsonIgnore]
+ [NonRecordField]
+ public MeasurementType MeasurementType
+ {
+ get
+ {
+ // Cache lookup first, then database query on miss.
+ if (m_measurementType is null)
+ m_measurementType = LazyContext.GetMeasurementType(MeasurementTypeID);
+
+ if (m_measurementType is null)
+ m_measurementType = QueryMeasurementType();
+
+ return m_measurementType;
+ }
+ set => m_measurementType = value;
+ }
+
+ [JsonIgnore]
+ [NonRecordField]
+ public MeasurementCharacteristic MeasurementCharacteristic
+ {
+ get
+ {
+ if (m_measurementCharacteristic is null)
+ m_measurementCharacteristic = LazyContext.GetMeasurementCharacteristic(MeasurementCharacteristicID);
+
+ if (m_measurementCharacteristic is null)
+ m_measurementCharacteristic = QueryMeasurementCharacteristic();
+
+ return m_measurementCharacteristic;
+ }
+ set => m_measurementCharacteristic = value;
+ }
+
+ [JsonIgnore]
+ [NonRecordField]
+ public Phase Phase
+ {
+ get
+ {
+ if (m_phase is null)
+ m_phase = LazyContext.GetPhase(PhaseID);
+
+ if (m_phase is null)
+ m_phase = QueryPhase();
+
+ return m_phase;
+ }
+ set => m_phase = value;
+ }
+
+ [JsonIgnore]
+ [NonRecordField]
+ public Meter Meter
+ {
+ get
+ {
+ if (m_meter is null)
+ m_meter = LazyContext.GetMeter(MeterID);
+
+ if (m_meter is null)
+ m_meter = QueryMeter();
+
+ return m_meter;
+ }
+ set => m_meter = value;
+ }
+
+ [JsonIgnore]
+ [NonRecordField]
+ public Asset Asset
+ {
+ get
+ {
+ if (m_asset is null)
+ m_asset = LazyContext.GetAsset(AssetID);
+
+ if (m_asset is null)
+ m_asset = QueryAsset();
+
+ return m_asset;
+ }
+ set => m_asset = value;
+ }
+
+ [JsonIgnore]
+ [NonRecordField]
+ public List Series
+ {
+ get => m_series ?? (m_series = QuerySeries());
+ set => m_series = value;
+ }
+
+ // Connection factory is shared through the LazyContext so the whole
+ // object graph opens connections the same way.
+ [JsonIgnore]
+ [NonRecordField]
+ public Func ConnectionFactory
+ {
+ get => LazyContext.ConnectionFactory;
+ set => LazyContext.ConnectionFactory = value;
+ }
+
+ [JsonIgnore]
+ [NonRecordField]
+ internal LazyContext LazyContext { get; set; } = new LazyContext();
+
+ #endregion
+
+ #region [ Methods ]
+
+ // The Get* methods query directly with the supplied connection and
+ // return null when the connection is null; they do not touch the cache.
+ public MeasurementType GetMeasurementType(AdoDataConnection connection)
+ {
+ if ((object)connection == null)
+ return null;
+
+ TableOperations measurementTypeTable = new TableOperations(connection);
+ return measurementTypeTable.QueryRecordWhere("ID = {0}", MeasurementTypeID);
+ }
+
+ public MeasurementCharacteristic GetMeasurementCharacteristic(AdoDataConnection connection)
+ {
+ if ((object)connection == null)
+ return null;
+
+ TableOperations measurementCharacteristicTable = new TableOperations(connection);
+ return measurementCharacteristicTable.QueryRecordWhere("ID = {0}", MeasurementCharacteristicID);
+ }
+
+ public Phase GetPhase(AdoDataConnection connection)
+ {
+ if ((object)connection == null)
+ return null;
+
+ TableOperations phaseTable = new TableOperations(connection);
+ return phaseTable.QueryRecordWhere("ID = {0}", PhaseID);
+ }
+
+ public Meter GetMeter(AdoDataConnection connection)
+ {
+ if ((object)connection == null)
+ return null;
+
+ TableOperations meterTable = new TableOperations(connection);
+ return meterTable.QueryRecordWhere("ID = {0}", MeterID);
+ }
+
+ public Asset GetAsset(AdoDataConnection connection)
+ {
+ if ((object)connection == null)
+ return null;
+
+ TableOperations assetTable = new TableOperations(connection);
+ return assetTable.QueryRecordWhere("ID = {0}", AssetID);
+ }
+
+ public IEnumerable GetSeries(AdoDataConnection connection)
+ {
+ if ((object)connection == null)
+ return null;
+
+ TableOperations seriesTable = new TableOperations(connection);
+ return seriesTable.QueryRecordsWhere("ChannelID = {0}", ID);
+ }
+
+ // The Query* helpers open a connection from the factory, fetch the
+ // record, and register it with the LazyContext for reuse.
+ private MeasurementType QueryMeasurementType()
+ {
+ MeasurementType measurementType;
+
+ using (AdoDataConnection connection = ConnectionFactory?.Invoke())
+ {
+ measurementType = GetMeasurementType(connection);
+ }
+
+ return LazyContext.GetMeasurementType(measurementType);
+ }
+
+ private MeasurementCharacteristic QueryMeasurementCharacteristic()
+ {
+ MeasurementCharacteristic measurementCharacteristic;
+
+ using (AdoDataConnection connection = ConnectionFactory?.Invoke())
+ {
+ measurementCharacteristic = GetMeasurementCharacteristic(connection);
+ }
+
+ return LazyContext.GetMeasurementCharacteristic(measurementCharacteristic);
+ }
+
+ private Phase QueryPhase()
+ {
+ Phase phase;
+
+ using (AdoDataConnection connection = ConnectionFactory?.Invoke())
+ {
+ phase = GetPhase(connection);
+ }
+
+ return LazyContext.GetPhase(phase);
+ }
+
+ private Meter QueryMeter()
+ {
+ Meter meter;
+
+ using (AdoDataConnection connection = ConnectionFactory?.Invoke())
+ {
+ meter = GetMeter(connection);
+ }
+
+ // Propagate the LazyContext so the meter's own navigation properties
+ // share this cache.
+ if ((object)meter != null)
+ meter.LazyContext = LazyContext;
+
+ return LazyContext.GetMeter(meter);
+ }
+
+ private Asset QueryAsset()
+ {
+ Asset asset;
+
+ using (AdoDataConnection connection = ConnectionFactory?.Invoke())
+ {
+ asset = GetAsset(connection);
+ }
+
+ if ((object)asset != null)
+ asset.LazyContext = LazyContext;
+
+ return LazyContext.GetAsset(asset);
+ }
+
+ private List QuerySeries()
+ {
+ List seriesList;
+
+ using (AdoDataConnection connection = ConnectionFactory?.Invoke())
+ {
+ seriesList = GetSeries(connection)?
+ .Select(LazyContext.GetSeries)
+ .ToList();
+ }
+
+ // Wire each series back to this channel and the shared cache.
+ if ((object)seriesList != null)
+ {
+ foreach (Series series in seriesList)
+ {
+ series.Channel = this;
+ series.LazyContext = LazyContext;
+ }
+ }
+
+ return seriesList;
+ }
+
+ #endregion
+ }
+
+ // Equality comparer that treats two Channel records as equal when they
+ // refer to the same database row (same ID).
+ public class ChannelComparer : IEqualityComparer
+ {
+     public bool Equals(Channel x, Channel y)
+     {
+         // Same instance (or both null) is trivially equal.
+         if (ReferenceEquals(x, y))
+             return true;
+
+         // Exactly one side null can never match.
+         if (x is null || y is null)
+             return false;
+
+         // Channels are identified by their database ID.
+         return x.ID == y.ID;
+     }
+
+     // Hash must agree with Equals, so it is likewise based on the ID alone.
+     public int GetHashCode(Channel obj) => obj.ID;
+ }
+
+ // Flattened projection of a channel and its series metadata (type,
+ // characteristic, phase, series type, orientation, phasing) as populated by
+ // reporting queries; not mapped to a single physical table.
+ public class ChannelInfo
+ {
+ [PrimaryKey(true)]
+ public int ChannelID { get; set; }
+
+ public string ChannelName { get; set; }
+
+ public string ChannelDescription { get; set; }
+
+ public string MeasurementType { get; set; }
+
+ public string MeasurementCharacteristic { get; set; }
+
+ public string Phase { get; set; }
+
+ public string SeriesType { get; set; }
+
+ public string Orientation { get; set; }
+
+ public string Phasing { get; set; }
+ }
+
+ // Get-or-create helpers for dashboard settings records. Each runs inside a
+ // ReadCommitted TransactionScope: look for an existing record, insert one
+ // when none exists, and capture the new identity via SELECT @@IDENTITY.
+ // Values containing a comma are matched only on the prefix before the first
+ // comma (T-SQL SUBSTRING(Value, 0, CHARINDEX(',', Value)) yields exactly
+ // those leading characters).
+ // NOTE(review): ReadCommitted does not stop two concurrent callers from
+ // both inserting the same setting -- confirm a unique constraint exists or
+ // that duplicates are acceptable.
+ public static partial class TableOperationsExtensions
+ {
+ public static DashSettings GetOrAdd(this TableOperations table, string name, string value, bool enabled = true)
+ {
+ TransactionScopeOption required = TransactionScopeOption.Required;
+
+ TransactionOptions transactionOptions = new TransactionOptions()
+ {
+ IsolationLevel = IsolationLevel.ReadCommitted,
+ Timeout = TransactionManager.MaximumTimeout
+ };
+
+ DashSettings dashSettings;
+
+ using (TransactionScope transactionScope = new TransactionScope(required, transactionOptions))
+ {
+ if (value.Contains(","))
+ dashSettings = table.QueryRecordWhere("Name = {0} AND SUBSTRING(Value, 0, CHARINDEX(',', Value)) = {1}", name, value.Split(',').First());
+ else
+ dashSettings = table.QueryRecordWhere("Name = {0} AND Value = {1}", name, value);
+
+ if ((object)dashSettings == null)
+ {
+ dashSettings = new DashSettings();
+ dashSettings.Name = name;
+ dashSettings.Value = value;
+ dashSettings.Enabled = enabled;
+
+ table.AddNewRecord(dashSettings);
+
+ dashSettings.ID = table.Connection.ExecuteScalar("SELECT @@IDENTITY");
+ }
+
+ transactionScope.Complete();
+ }
+
+ return dashSettings;
+ }
+
+ // Same as above, but scoped to a single user account.
+ public static UserDashSettings GetOrAdd(this TableOperations table, string name, Guid user, string value, bool enabled = true)
+ {
+ TransactionScopeOption required = TransactionScopeOption.Required;
+
+ TransactionOptions transactionOptions = new TransactionOptions()
+ {
+ IsolationLevel = IsolationLevel.ReadCommitted,
+ Timeout = TransactionManager.MaximumTimeout
+ };
+
+ UserDashSettings dashSettings;
+
+ using (TransactionScope transactionScope = new TransactionScope(required, transactionOptions))
+ {
+ if (value.Contains(","))
+ dashSettings = table.QueryRecordWhere("Name = {0} AND SUBSTRING(Value, 0, CHARINDEX(',', Value)) = {1} AND UserAccountID = {2}", name, value.Split(',').First(), user);
+ else
+ dashSettings = table.QueryRecordWhere("Name = {0} AND Value = {1} AND UserAccountID = {2}", name, value, user);
+
+ if ((object)dashSettings == null)
+ {
+ dashSettings = new UserDashSettings();
+ dashSettings.Name = name;
+ dashSettings.Value = value;
+ dashSettings.Enabled = enabled;
+ dashSettings.UserAccountID = user;
+
+ table.AddNewRecord(dashSettings);
+
+ dashSettings.ID = table.Connection.ExecuteScalar("SELECT @@IDENTITY");
+ }
+
+ transactionScope.Complete();
+ }
+
+ return dashSettings;
+ }
+
+ }
+
+}
\ No newline at end of file
diff --git a/src/Libraries/openXDA.Model/Channels/ChannelData.cs b/src/Libraries/openXDA.Model/Channels/ChannelData.cs
new file mode 100644
index 00000000..2ce28f94
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Channels/ChannelData.cs
@@ -0,0 +1,649 @@
+//******************************************************************************************************
+// ChannelData.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 12/12/2019 - C. Lackner
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System.Data;
+using Gemstone;
+using Gemstone.Data;
+using Gemstone.Data.DataExtensions;
+using Gemstone.Data.Model;
+using Ionic.Zlib;
+
+namespace openXDA.Model
+{
+ [TableName("ChannelData")]
+ public class ChannelData
+ {
+ #region [ Members ]
+
+ // Nested Types
+ private class DigitalSection
+ {
+ public DateTime Start { get; set; }
+ public DateTime End { get; set; }
+ public int NumPoints { get; set; }
+ public double Value { get; set; }
+ public static int Size => 2 * sizeof(long) + sizeof(ushort) + sizeof(int);
+
+ public int CopyBytes(byte[] byteArray, int offset, double compressionScale, double compressionOffset)
+ {
+ ushort compressedValue = (ushort)Math.Round((Value - compressionOffset) * compressionScale);
+ const ushort NaNValue = ushort.MaxValue;
+
+ if (compressedValue == NaNValue)
+ compressedValue--;
+
+ if (double.IsNaN(Value))
+ compressedValue = NaNValue;
+
+ int startOffset = offset;
+ offset += LittleEndian.CopyBytes(Start.Ticks, byteArray, offset);
+ offset += LittleEndian.CopyBytes(End.Ticks, byteArray, offset);
+ offset += LittleEndian.CopyBytes(compressedValue, byteArray, offset);
+ offset += LittleEndian.CopyBytes(NumPoints, byteArray, offset);
+ return offset - startOffset;
+ }
+
+ public static DigitalSection FromBytes(byte[] bytes, int offset, double decompressionOffset, double decompressionScale)
+ {
+ DigitalSection section = new DigitalSection();
+
+ section.Start = new DateTime(LittleEndian.ToInt64(bytes, offset));
+ offset += sizeof(long);
+
+ section.End = new DateTime(LittleEndian.ToInt64(bytes, offset));
+ offset += sizeof(long);
+
+ ushort compressedValue = LittleEndian.ToUInt16(bytes, offset);
+ section.Value = decompressionScale * compressedValue + decompressionOffset;
+ offset += sizeof(ushort);
+
+ section.NumPoints = LittleEndian.ToInt32(bytes, offset);
+
+ return section;
+ }
+ }
+ #endregion
+
+ #region [ Properties ]
+
+ [PrimaryKey(true)]
+ public int ID { get; set; }
+
+ public int SeriesID { get; set; }
+
+ public int EventID { get; set; }
+
+ public byte[] TimeDomainData { get; set; }
+
+ public int MarkedForDeletion { get; set; }
+
+ #endregion
+
+ #region [ Methods ]
+
+ /// <summary>
+ /// Adjusts the TimeDomain Data by moving it a certain amount of Time
+ /// </summary>
+ /// <param name="ticks">The number of Ticks the Data is moved. For moving it backwards in Time this needs to be &lt; 0</param>
+ public void AdjustData(Ticks ticks)
+ {
+ // Initially we assume Data is already migrated...
+ if (TimeDomainData == null)
+ return;
+
+ Tuple<int, List<DataPoint>> decompressed = Decompress(TimeDomainData)[0];
+ List<DataPoint> data = decompressed.Item2;
+
+ foreach (DataPoint dataPoint in data)
+ dataPoint.Time = dataPoint.Time.AddTicks(ticks);
+
+ TimeDomainData = ToData(data, decompressed.Item1);
+ }
+
+ #endregion
+
+ #region [ Static ]
+
+ public static List<byte[]> DataFromEvent(int eventID, Func<AdoDataConnection> connectionFactory)
+ {
+ using (AdoDataConnection connection = connectionFactory())
+ {
+ TableOperations<Event> eventTable = new TableOperations<Event>(connection);
+ Event evt = eventTable.QueryRecordWhere("ID = {0}", eventID);
+
+ TableOperations<Asset> assetTable = new TableOperations<Asset>(connection);
+ Asset asset = assetTable.QueryRecordWhere("ID = {0}", evt.AssetID);
+ asset.ConnectionFactory = connectionFactory;
+
+ List<Channel> channels = asset.DirectChannels
+ .Concat(asset.ConnectedChannels)
+ .Where(channel => channel.MeterID == evt.MeterID)
+ .ToList();
+
+ if (!channels.Any())
+ return new List<byte[]>();
+
+ IEnumerable<int> assetIDs = channels
+ .Select(channel => channel.AssetID)
+ .Distinct();
+
+ foreach (int assetID in assetIDs)
+ MigrateLegacyBlob(connection, evt.FileGroupID, assetID, evt.StartTime);
+
+ // Optimization to avoid individually querying channels that don't have any data
+ HashSet<int> channelsWithData = QueryChannelsWithData(connection, evt);
+ channels.RemoveAll(channel => !channelsWithData.Contains(channel.ID));
+
+ List<byte[]> eventData = new List<byte[]>();
+
+ foreach (Channel channel in channels)
+ {
+ const string DataQueryFormat =
+ "SELECT ChannelData.TimeDomainData " +
+ "FROM " +
+ " ChannelData JOIN " +
+ " Series ON ChannelData.SeriesID = Series.ID JOIN " +
+ " Event ON ChannelData.EventID = Event.ID " +
+ "WHERE " +
+ " Event.FileGroupID = {0} AND " +
+ " Series.ChannelID = {1} AND " +
+ " Event.StartTime = {2}";
+
+ object startTime2 = ToDateTime2(connection, evt.StartTime);
+ byte[] timeDomainData = connection.ExecuteScalar<byte[]>(DataQueryFormat, evt.FileGroupID, channel.ID, startTime2);
+
+ if (timeDomainData is null)
+ continue;
+
+ eventData.Add(timeDomainData);
+ }
+
+ return eventData;
+ }
+ }
+
+ public static byte[] DataFromEvent(int eventID, int channelID, Func<AdoDataConnection> connectionFactory)
+ {
+ using (AdoDataConnection connection = connectionFactory())
+ {
+ TableOperations<Event> eventTable = new TableOperations<Event>(connection);
+ Event evt = eventTable.QueryRecordWhere("ID = {0}", eventID);
+ MigrateLegacyBlob(connection, evt);
+
+ const string QueryFormat =
+ "SELECT ChannelData.TimeDomainData " +
+ "FROM " +
+ " ChannelData JOIN " +
+ " Series ON ChannelData.SeriesID = Series.ID " +
+ "WHERE " +
+ " ChannelData.EventID = {0} AND " +
+ " Series.ChannelID = {1}";
+
+ return connection.ExecuteScalar<byte[]>(QueryFormat, eventID, channelID);
+ }
+ }
+
+ private static void MigrateLegacyBlob(AdoDataConnection connection, int fileGroupID, int assetID, DateTime startTime)
+ {
+ const string AssetQueryFilter = "FileGroupID = {0} AND AssetID = {1} AND StartTime = {2}";
+ object startTime2 = ToDateTime2(connection, startTime);
+
+ TableOperations<Event> eventTable = new TableOperations<Event>(connection);
+ Event evt = eventTable.QueryRecordWhere(AssetQueryFilter, fileGroupID, assetID, startTime2);
+ MigrateLegacyBlob(connection, evt);
+ }
+
+ private static void MigrateLegacyBlob(AdoDataConnection connection, Event evt)
+ {
+ if (evt is null || evt.EventDataID is null)
+ return;
+
+ int eventDataID = evt.EventDataID.GetValueOrDefault();
+ byte[] timeDomainData = connection.ExecuteScalar<byte[]>("SELECT TimeDomainData FROM EventData WHERE ID = {0}", eventDataID);
+ List<Tuple<int, List<DataPoint>>> decompressedData = Decompress(timeDomainData);
+
+ TableOperations<ChannelData> channelDataTable = new TableOperations<ChannelData>(connection);
+
+ foreach (Tuple<int, List<DataPoint>> tuple in decompressedData)
+ {
+ int seriesID = tuple.Item1;
+ List<DataPoint> data = tuple.Item2;
+
+ ChannelData channelData = new ChannelData();
+ channelData.SeriesID = seriesID;
+ channelData.EventID = evt.ID;
+ channelData.TimeDomainData = ToData(data, seriesID);
+ channelDataTable.AddNewRecord(channelData);
+ }
+
+ connection.ExecuteNonQuery("UPDATE Event SET EventDataID = NULL WHERE ID = {0}", evt.ID);
+ connection.ExecuteNonQuery("DELETE FROM EventData WHERE ID = {0}", eventDataID);
+ }
+
+ /// <summary>
+ /// Turns a list of DataPoints into a blob to be saved in the database.
+ /// </summary>
+ /// <param name="data">The data as a <see cref="List{DataPoint}"/></param>
+ /// <param name="seriesID">The SeriesID to be encoded into the blob</param>
+ /// <returns>The byte array to be saved as a blob in the database.</returns>
+ public static byte[] ToData(List<DataPoint> data, int seriesID)
+ {
+ // We can use Digital compression if the data changes no more than 10% of the time.
+ bool useDigitalCompression = data
+ .Skip(1)
+ .Zip(data, (p2, p1) => new { p1, p2 })
+ .Where(obj => obj.p1.Value != obj.p2.Value)
+ .Select((_, index) => index + 1)
+ .All(nChanges => nChanges <= 0.1 * data.Count);
+
+ if (useDigitalCompression)
+ return ToDigitalData(data, seriesID);
+
+ var timeSeries = data.Select(dataPoint => new { Time = dataPoint.Time.Ticks, Compressed = false }).ToList();
+
+ for (int i = 1; i < timeSeries.Count; i++)
+ {
+ long previousTimestamp = data[i - 1].Time.Ticks;
+ long timestamp = timeSeries[i].Time;
+ long diff = timestamp - previousTimestamp;
+
+ if (diff >= 0 && diff <= ushort.MaxValue)
+ timeSeries[i] = new { Time = diff, Compressed = true };
+ }
+
+ int timeSeriesByteLength = timeSeries.Sum(obj => obj.Compressed ? sizeof(ushort) : sizeof(int) + sizeof(long));
+ int dataSeriesByteLength = sizeof(int) + (2 * sizeof(double)) + (data.Count * sizeof(ushort));
+ int totalByteLength = sizeof(int) + timeSeriesByteLength + dataSeriesByteLength;
+
+ byte[] result = new byte[totalByteLength];
+ int offset = 0;
+
+ offset += LittleEndian.CopyBytes(data.Count, result, offset);
+
+ List<int> uncompressedIndexes = timeSeries
+ .Select((obj, Index) => new { obj.Compressed, Index })
+ .Where(obj => !obj.Compressed)
+ .Select(obj => obj.Index)
+ .ToList();
+
+ for (int i = 0; i < uncompressedIndexes.Count; i++)
+ {
+ int index = uncompressedIndexes[i];
+ int nextIndex = (i + 1 < uncompressedIndexes.Count) ? uncompressedIndexes[i + 1] : timeSeries.Count;
+
+ offset += LittleEndian.CopyBytes(nextIndex - index, result, offset);
+ offset += LittleEndian.CopyBytes(timeSeries[index].Time, result, offset);
+
+ for (int j = index + 1; j < nextIndex; j++)
+ offset += LittleEndian.CopyBytes((ushort)timeSeries[j].Time, result, offset);
+ }
+
+ const ushort NaNValue = ushort.MaxValue;
+ const ushort MaxCompressedValue = ushort.MaxValue - 1;
+ double range = data.Select(item => item.Value).Max() - data.Select(item => item.Value).Min();
+ double decompressionOffset = data.Select(item => item.Value).Min();
+ double decompressionScale = range / MaxCompressedValue;
+ double compressionScale = (decompressionScale != 0.0D) ? 1.0D / decompressionScale : 0.0D;
+
+ offset += LittleEndian.CopyBytes(seriesID, result, offset);
+ offset += LittleEndian.CopyBytes(decompressionOffset, result, offset);
+ offset += LittleEndian.CopyBytes(decompressionScale, result, offset);
+
+ foreach (DataPoint dataPoint in data)
+ {
+ ushort compressedValue = (ushort)Math.Round((dataPoint.Value - decompressionOffset) * compressionScale);
+
+ if (compressedValue == NaNValue)
+ compressedValue--;
+
+ if (double.IsNaN(dataPoint.Value))
+ compressedValue = NaNValue;
+
+ offset += LittleEndian.CopyBytes(compressedValue, result, offset);
+ }
+
+ byte[] returnArray = GZipStream.CompressBuffer(result);
+ returnArray[0] = 0x44;
+ returnArray[1] = 0x33;
+
+ return returnArray;
+ }
+
+ private static byte[] ToDigitalData(List<DataPoint> data, int seriesID)
+ {
+ List<DigitalSection> digitalData = new List<DigitalSection>();
+ DigitalSection currentSection = null;
+ foreach (DataPoint dataPoint in data)
+ {
+ if (currentSection is null)
+ {
+ currentSection = new DigitalSection()
+ {
+ Start = dataPoint.Time,
+ End = dataPoint.Time,
+ Value = dataPoint.Value,
+ NumPoints = 1
+ };
+ }
+ else if (currentSection.Value != dataPoint.Value)
+ {
+ digitalData.Add(currentSection);
+ currentSection = new DigitalSection()
+ {
+ Start = dataPoint.Time,
+ End = dataPoint.Time,
+ Value = dataPoint.Value,
+ NumPoints = 1
+ };
+ }
+ else
+ {
+ currentSection.NumPoints++;
+ currentSection.End = dataPoint.Time;
+ }
+ }
+
+ if (!(currentSection is null))
+ digitalData.Add(currentSection);
+
+ int totalByteLength = sizeof(int) + 2 * sizeof(double) + digitalData.Count * DigitalSection.Size;
+ byte[] result = new byte[totalByteLength];
+ int offset = 0;
+
+ const ushort MaxCompressedValue = ushort.MaxValue - 1;
+ double range = data.Select(item => item.Value).Max() - data.Select(item => item.Value).Min();
+ double decompressionOffset = data.Select(item => item.Value).Min();
+ double decompressionScale = range / MaxCompressedValue;
+ double compressionScale = (decompressionScale != 0.0D) ? 1.0D / decompressionScale : 0.0D;
+
+ offset += LittleEndian.CopyBytes(seriesID, result, offset);
+ offset += LittleEndian.CopyBytes(decompressionOffset, result, offset);
+ offset += LittleEndian.CopyBytes(decompressionScale, result, offset);
+
+ foreach (DigitalSection digitalSection in digitalData)
+ offset += digitalSection.CopyBytes(result, offset, compressionScale, decompressionOffset);
+
+ byte[] returnArray = GZipStream.CompressBuffer(result);
+ returnArray[0] = DigitalHeader[0];
+ returnArray[1] = DigitalHeader[1];
+ return returnArray;
+ }
+
+ /// <summary>
+ /// Decompresses a byte array into a List of DataPoints
+ /// </summary>
+ /// <param name="data">The byte array filled with compressed data</param>
+ /// <returns>List of data series consisting of series ID and data points.</returns>
+ public static List<Tuple<int, List<DataPoint>>> Decompress(byte[] data)
+ {
+ List<Tuple<int, List<DataPoint>>> result = new List<Tuple<int, List<DataPoint>>>();
+
+ if (data == null)
+ return result;
+ // If the blob contains the GZip header,
+ // use the legacy deserialization algorithm
+ if (data[0] == LegacyHeader[0] && data[1] == LegacyHeader[1])
+ {
+ return Decompress_Legacy(data);
+ }
+ // If this blob uses digital decompression use that algorithm
+ if (data[0] == DigitalHeader[0] && data[1] == DigitalHeader[1])
+ {
+ return Decompress_Digital(data);
+ }
+
+ // Restore the GZip header before uncompressing
+ data[0] = LegacyHeader[0];
+ data[1] = LegacyHeader[1];
+
+ byte[] uncompressedData;
+ int offset;
+
+ uncompressedData = GZipStream.UncompressBuffer(data);
+ offset = 0;
+
+ int m_samples = LittleEndian.ToInt32(uncompressedData, offset);
+ offset += sizeof(int);
+
+ List<DateTime> times = new List<DateTime>();
+
+ while (times.Count < m_samples)
+ {
+ int timeValues = LittleEndian.ToInt32(uncompressedData, offset);
+ offset += sizeof(int);
+
+ long currentValue = LittleEndian.ToInt64(uncompressedData, offset);
+ offset += sizeof(long);
+ times.Add(new DateTime(currentValue));
+
+ for (int i = 1; i < timeValues; i++)
+ {
+ currentValue += LittleEndian.ToUInt16(uncompressedData, offset);
+ offset += sizeof(ushort);
+ times.Add(new DateTime(currentValue));
+ }
+ }
+
+ while (offset < uncompressedData.Length)
+ {
+ List<DataPoint> dataSeries = new List<DataPoint>();
+ int seriesID = LittleEndian.ToInt32(uncompressedData, offset);
+ offset += sizeof(int);
+
+
+ const ushort NaNValue = ushort.MaxValue;
+ double decompressionOffset = LittleEndian.ToDouble(uncompressedData, offset);
+ double decompressionScale = LittleEndian.ToDouble(uncompressedData, offset + sizeof(double));
+ offset += 2 * sizeof(double);
+
+ for (int i = 0; i < m_samples; i++)
+ {
+ ushort compressedValue = LittleEndian.ToUInt16(uncompressedData, offset);
+ offset += sizeof(ushort);
+
+ double decompressedValue = decompressionScale * compressedValue + decompressionOffset;
+
+ if (compressedValue == NaNValue)
+ decompressedValue = double.NaN;
+
+ dataSeries.Add(new DataPoint()
+ {
+ Time = times[i],
+ Value = decompressedValue
+ });
+ }
+
+ result.Add(new Tuple<int, List<DataPoint>>(seriesID, dataSeries));
+ }
+
+ return result;
+ }
+
+ private static List<Tuple<int, List<DataPoint>>> Decompress_Legacy(byte[] data)
+ {
+ List<Tuple<int, List<DataPoint>>> result = new List<Tuple<int, List<DataPoint>>>();
+ byte[] uncompressedData;
+ int offset;
+ DateTime[] times;
+ int seriesID;
+
+ uncompressedData = GZipStream.UncompressBuffer(data);
+ offset = 0;
+
+ int m_samples = LittleEndian.ToInt32(uncompressedData, offset);
+ offset += sizeof(int);
+
+ times = new DateTime[m_samples];
+
+ for (int i = 0; i < m_samples; i++)
+ {
+ times[i] = new DateTime(LittleEndian.ToInt64(uncompressedData, offset));
+ offset += sizeof(long);
+ }
+
+ while (offset < uncompressedData.Length)
+ {
+ seriesID = LittleEndian.ToInt32(uncompressedData, offset);
+ offset += sizeof(int);
+
+ List<DataPoint> points = new List<DataPoint>();
+
+ for (int i = 0; i < m_samples; i++)
+ {
+ points.Add(new DataPoint()
+ {
+ Time = times[i],
+ Value = LittleEndian.ToDouble(uncompressedData, offset)
+ });
+
+ offset += sizeof(double);
+ }
+
+ result.Add(new Tuple<int, List<DataPoint>>(seriesID, points));
+ }
+ return result;
+ }
+
+ /// <summary>
+ /// Decompresses a Digital stored as compressed series of changes
+ /// </summary>
+ /// <param name="data">The compressed <see cref="byte"/> array</param>
+ /// <returns>a Dictionary mapping a SeriesID to a decompressed <see cref="List{DataPoint}"/></returns>
+ private static List<Tuple<int, List<DataPoint>>> Decompress_Digital(byte[] data)
+ {
+ List<Tuple<int, List<DataPoint>>> result = new List<Tuple<int, List<DataPoint>>>();
+ byte[] uncompressedData;
+ int offset;
+ int seriesID;
+ List<DataPoint> points = new List<DataPoint>();
+
+ // Restore the GZip header before uncompressing
+ data[0] = LegacyHeader[0];
+ data[1] = LegacyHeader[1];
+
+ uncompressedData = GZipStream.UncompressBuffer(data);
+ offset = 0;
+
+ seriesID = LittleEndian.ToInt32(uncompressedData, offset);
+ offset += sizeof(int);
+
+ double decompressionOffset = LittleEndian.ToDouble(uncompressedData, offset);
+ double decompressionScale = LittleEndian.ToDouble(uncompressedData, offset + sizeof(double));
+ offset += 2 * sizeof(double);
+
+ while(offset < uncompressedData.Length)
+ {
+ DigitalSection section = DigitalSection.FromBytes(uncompressedData, offset, decompressionOffset, decompressionScale);
+ offset += DigitalSection.Size;
+
+ points.Add(new DataPoint()
+ {
+ Time = section.Start,
+ Value = section.Value
+ });
+
+ if (section.NumPoints == 1)
+ continue;
+
+ // Use a fixed-point offset with 6 bits of additional
+ // precision to help avoid accumulation of rounding errors
+ long diff = (section.End - section.Start).Ticks << 6;
+ long step = diff / (section.NumPoints - 1);
+ long lastOffset = step;
+
+ for (int i = 1; i < section.NumPoints - 1; i++)
+ {
+ points.Add(new DataPoint()
+ {
+ Time = section.Start.AddTicks(lastOffset >> 6),
+ Value = section.Value
+ });
+
+ lastOffset += step;
+ }
+
+ points.Add(new DataPoint()
+ {
+ Time = section.End,
+ Value = section.Value
+ });
+ }
+
+ result.Add(new Tuple<int, List<DataPoint>>(seriesID, points));
+
+ return result;
+ }
+
+ private static HashSet<int> QueryChannelsWithData(AdoDataConnection connection, Event evt)
+ {
+ const string FilterQueryFormat =
+ "SELECT Series.ChannelID " +
+ "FROM " +
+ " ChannelData JOIN " +
+ " Series ON ChannelData.SeriesID = Series.ID JOIN " +
+ " Event ON ChannelData.EventID = Event.ID " +
+ "WHERE " +
+ " Event.FileGroupID = {0} AND " +
+ " Event.StartTime = {1}";
+
+ object startTime2 = ToDateTime2(connection, evt.StartTime);
+
+ using (DataTable table = connection.RetrieveData(FilterQueryFormat, evt.FileGroupID, startTime2))
+ {
+ IEnumerable<int> channelsWithData = table
+ .AsEnumerable()
+ .Select(row => row.ConvertField<int>("ChannelID"));
+
+ return new HashSet<int>(channelsWithData);
+ }
+ }
+
+ private static object ToDateTime2(AdoDataConnection connection, DateTime dateTime)
+ {
+ using (IDbCommand command = connection.Connection.CreateCommand())
+ {
+ IDbDataParameter parameter = command.CreateParameter();
+ parameter.DbType = DbType.DateTime2;
+ parameter.Value = dateTime;
+ return parameter;
+ }
+ }
+
+ ///
+ /// The header of a datablob compressed as analog Data
+ ///
+ public static readonly byte[] AnalogHeader = { 0x11, 0x11 };
+
+ ///
+ /// The header of a datablob compressed as Digital State Changes
+ ///
+ public static readonly byte[] DigitalHeader = { 0x22, 0x22 };
+
+ ///
+ /// The header of a datablob compressed as Legacy Data
+ ///
+ public static readonly byte[] LegacyHeader = { 0x1F, 0x8B };
+
+ #endregion
+ }
+}
diff --git a/src/Libraries/openXDA.Model/Channels/DataPoint.cs b/src/Libraries/openXDA.Model/Channels/DataPoint.cs
new file mode 100644
index 00000000..88c39332
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Channels/DataPoint.cs
@@ -0,0 +1,110 @@
+//******************************************************************************************************
+// DataPoint.cs - Gbtc
+//
+// Copyright © 2025, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 05/15/2025 - C. Lackner
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+namespace openXDA.Model
+{
+ /// <summary>
+ /// Represents a single data point in a time series.
+ /// </summary>
+ public class DataPoint
+ {
+ #region [ Properties ]
+
+ public DateTime Time { get; set; }
+ public double Value { get; set; }
+
+ #endregion
+
+ #region [ Methods ]
+
+ public DataPoint Shift(TimeSpan timeShift)
+ {
+ return new DataPoint()
+ {
+ Time = Time.Add(timeShift),
+ Value = Value
+ };
+ }
+
+ public DataPoint Negate()
+ {
+ return new DataPoint()
+ {
+ Time = Time,
+ Value = -Value
+ };
+ }
+
+ public DataPoint Add(DataPoint point)
+ {
+ if (Time != point.Time)
+ throw new InvalidOperationException("Cannot add datapoints with mismatched times");
+
+ return new DataPoint()
+ {
+ Time = Time,
+ Value = Value + point.Value
+ };
+ }
+
+ public DataPoint Subtract(DataPoint point)
+ {
+ return Add(point.Negate());
+ }
+
+ public DataPoint Add(double value)
+ {
+ return new DataPoint()
+ {
+ Time = Time,
+ Value = Value + value
+ };
+ }
+
+ public DataPoint Subtract(double value)
+ {
+ return Add(-value);
+ }
+
+ public DataPoint Multiply(double value)
+ {
+ return new DataPoint()
+ {
+ Time = Time,
+ Value = Value * value
+ };
+ }
+
+ public bool LargerThan(double comparison)
+ {
+ return Value > comparison;
+ }
+
+ public bool LargerThan(DataPoint point)
+ {
+ return LargerThan(point.Value);
+ }
+
+ #endregion
+ }
+}
diff --git a/src/Libraries/openXDA.Model/Channels/MeasurementCharacteristic.cs b/src/Libraries/openXDA.Model/Channels/MeasurementCharacteristic.cs
new file mode 100644
index 00000000..a3e49898
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Channels/MeasurementCharacteristic.cs
@@ -0,0 +1,45 @@
+//******************************************************************************************************
+// MeasurementCharacteristic.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System.ComponentModel.DataAnnotations;
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+ [PostRoles("Administrator, Transmission SME")]
+ [DeleteRoles("Administrator, Transmission SME")]
+ [PatchRoles("Administrator, Transmission SME")]
+ public class MeasurementCharacteristic
+ {
+ [PrimaryKey(true)]
+ public int ID { get; set; }
+
+ [StringLength(200)]
+ [DefaultSortOrder]
+ public string Name { get; set; }
+
+ public string Description { get; set; }
+
+ public bool Display { get; set; }
+ }
+}
diff --git a/src/Libraries/openXDA.Model/Channels/MeasurementType.cs b/src/Libraries/openXDA.Model/Channels/MeasurementType.cs
new file mode 100644
index 00000000..20d3ea30
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Channels/MeasurementType.cs
@@ -0,0 +1,44 @@
+//******************************************************************************************************
+// MeasurementType.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System;
+using System.ComponentModel.DataAnnotations;
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+ [PostRoles("Administrator, Transmission SME")]
+ [DeleteRoles("Administrator, Transmission SME")]
+ [PatchRoles("Administrator, Transmission SME")]
+ public class MeasurementType
+ {
+ [PrimaryKey(true)]
+ public int ID { get; set; }
+
+ [StringLength(200)]
+ [DefaultSortOrder]
+ public string Name { get; set; }
+
+ public string Description { get; set; }
+ }
+}
diff --git a/src/Libraries/openXDA.Model/Channels/Phase.cs b/src/Libraries/openXDA.Model/Channels/Phase.cs
new file mode 100644
index 00000000..705ec274
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Channels/Phase.cs
@@ -0,0 +1,43 @@
+//******************************************************************************************************
+// Phase.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System.ComponentModel.DataAnnotations;
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+ [PostRoles("Administrator, Transmission SME")]
+ [DeleteRoles("Administrator, Transmission SME")]
+ [PatchRoles("Administrator, Transmission SME")]
+ public class Phase
+ {
+ [PrimaryKey(true)]
+ public int ID { get; set; }
+
+ [StringLength(200)]
+ [DefaultSortOrder]
+ public string Name { get; set; }
+
+ public string Description { get; set; }
+ }
+}
diff --git a/src/Libraries/openXDA.Model/Channels/Series.cs b/src/Libraries/openXDA.Model/Channels/Series.cs
new file mode 100644
index 00000000..d6c96a6c
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Channels/Series.cs
@@ -0,0 +1,248 @@
+//******************************************************************************************************
+// Series.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 06/20/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System.Data;
+using Gemstone.Data;
+using Gemstone.Data.Model;
+using Newtonsoft.Json;
+
+namespace openXDA.Model
+{
+    /// <summary>
+    /// Immutable key that identifies a series by the channel it belongs to and
+    /// the name of its series type. Supports case-insensitive value equality so
+    /// equivalent series can be matched across meters.
+    /// </summary>
+    public class SeriesKey : IEquatable<SeriesKey>
+    {
+        #region [ Constructors ]
+
+        /// <summary>Creates a key from an explicit channel key and series type name.</summary>
+        public SeriesKey(ChannelKey channelKey, string seriesType)
+        {
+            ChannelKey = channelKey;
+            SeriesType = seriesType;
+        }
+
+        /// <summary>Creates the key that identifies the given series.</summary>
+        public SeriesKey(Series series)
+            : this(new ChannelKey(series.Channel), series.SeriesType.Name)
+        {
+        }
+
+        #endregion
+
+        #region [ Properties ]
+
+        /// <summary>Key identifying the channel the series belongs to.</summary>
+        public ChannelKey ChannelKey { get; }
+
+        /// <summary>Name of the series type; compared case-insensitively.</summary>
+        public string SeriesType { get; }
+
+        #endregion
+
+        #region [ Methods ]
+
+        /// <summary>
+        /// Queries the database for the series identified by this key on the given meter.
+        /// </summary>
+        /// <param name="connection">Open database connection used for the query.</param>
+        /// <param name="meterID">ID of the meter whose channels are searched.</param>
+        /// <returns>The matching series record, or null if no match is found.</returns>
+        public Series Find(AdoDataConnection connection, int meterID)
+        {
+            const string QueryFormat =
+                "SELECT Series.* " +
+                "FROM " +
+                "    Series JOIN " +
+                "    Channel ON Series.ChannelID = Channel.ID JOIN " +
+                "    MeasurementType ON Channel.MeasurementTypeID = MeasurementType.ID JOIN " +
+                "    MeasurementCharacteristic ON Channel.MeasurementCharacteristicID = MeasurementCharacteristic.ID JOIN " +
+                "    Phase ON Channel.PhaseID = Phase.ID JOIN " +
+                "    SeriesType ON Series.SeriesTypeID = SeriesType.ID " +
+                "WHERE " +
+                "    Channel.MeterID = {0} AND " +
+                "    Channel.AssetID = {1} AND " +
+                "    Channel.HarmonicGroup = {2} AND " +
+                "    Channel.Name = {3} AND " +
+                "    MeasurementType.Name = {4} AND " +
+                "    MeasurementCharacteristic.Name = {5} AND " +
+                "    Phase.Name = {6} AND " +
+                "    SeriesType.Name = {7}";
+
+            object[] parameters =
+            {
+                meterID,
+                ChannelKey.LineID,
+                ChannelKey.HarmonicGroup,
+                ChannelKey.Name,
+                ChannelKey.MeasurementType,
+                ChannelKey.MeasurementCharacteristic,
+                ChannelKey.Phase,
+                SeriesType
+            };
+
+            using (DataTable table = connection.RetrieveData(QueryFormat, parameters))
+            {
+                if (table.Rows.Count == 0)
+                    return null;
+
+                TableOperations<Series> seriesTable = new TableOperations<Series>(connection);
+                return seriesTable.LoadRecord(table.Rows[0]);
+            }
+        }
+
+        /// <summary>Computes a hash consistent with the case-insensitive equality below.</summary>
+        public override int GetHashCode()
+        {
+            StringComparer stringComparer = StringComparer.OrdinalIgnoreCase;
+
+            int hash = 1009;
+            hash = 9176 * hash + ChannelKey.GetHashCode();
+            hash = 9176 * hash + stringComparer.GetHashCode(SeriesType);
+            return hash;
+        }
+
+        public override bool Equals(object obj)
+        {
+            return Equals(obj as SeriesKey);
+        }
+
+        /// <summary>Value equality: channel keys match and series type names match, ignoring case.</summary>
+        public bool Equals(SeriesKey other)
+        {
+            if (other is null)
+                return false;
+
+            StringComparison stringComparison = StringComparison.OrdinalIgnoreCase;
+
+            return
+                ChannelKey.Equals(other.ChannelKey) &&
+                SeriesType.Equals(other.SeriesType, stringComparison);
+        }
+
+        #endregion
+    }
+
+    /// <summary>
+    /// Model of a record in the Series table; associates a channel with a
+    /// series type and the source indexes used to locate its data.
+    /// </summary>
+    public class Series
+    {
+        #region [ Members ]
+
+        // Fields
+        private SeriesType m_seriesType;    // lazily resolved navigation to the series type record
+        private Channel m_channel;          // lazily resolved navigation to the channel record
+
+        #endregion
+
+        #region [ Properties ]
+
+        /// <summary>Auto-incrementing primary key.</summary>
+        [PrimaryKey(true)]
+        public int ID { get; set; }
+
+        /// <summary>Foreign key to the Channel record this series belongs to.</summary>
+        public int ChannelID { get; set; }
+
+        /// <summary>Foreign key to the SeriesType record.</summary>
+        public int SeriesTypeID { get; set; }
+
+        /// <summary>Indexes into the source data for this series.</summary>
+        public string SourceIndexes { get; set; }
+
+        /// <summary>
+        /// Series type navigation property; resolved through the lazy context
+        /// first and queried from the database only when not already cached.
+        /// </summary>
+        [JsonIgnore]
+        [NonRecordField]
+        public SeriesType SeriesType
+        {
+            get
+            {
+                if (m_seriesType is null)
+                    m_seriesType = LazyContext.GetSeriesType(SeriesTypeID);
+
+                if (m_seriesType is null)
+                    m_seriesType = QuerySeriesType();
+
+                return m_seriesType;
+            }
+            set => m_seriesType = value;
+        }
+
+        /// <summary>
+        /// Channel navigation property; resolved through the lazy context
+        /// first and queried from the database only when not already cached.
+        /// </summary>
+        [JsonIgnore]
+        [NonRecordField]
+        public Channel Channel
+        {
+            get
+            {
+                if (m_channel is null)
+                    m_channel = LazyContext.GetChannel(ChannelID);
+
+                if (m_channel is null)
+                    m_channel = QueryChannel();
+
+                return m_channel;
+            }
+            set => m_channel = value;
+        }
+
+        /// <summary>Factory used to open database connections for lazy navigation queries.</summary>
+        [JsonIgnore]
+        [NonRecordField]
+        public Func<AdoDataConnection> ConnectionFactory
+        {
+            get => LazyContext.ConnectionFactory;
+            set => LazyContext.ConnectionFactory = value;
+        }
+
+        /// <summary>Shared cache of navigation records to avoid redundant queries.</summary>
+        [JsonIgnore]
+        [NonRecordField]
+        internal LazyContext LazyContext { get; set; } = new LazyContext();
+
+        #endregion
+
+        #region [ Methods ]
+
+        /// <summary>Queries the series type record for this series.</summary>
+        /// <param name="connection">Connection to query; a null connection yields null.</param>
+        public SeriesType GetSeriesType(AdoDataConnection connection)
+        {
+            if (connection is null)
+                return null;
+
+            TableOperations<SeriesType> seriesTypeTable = new TableOperations<SeriesType>(connection);
+            return seriesTypeTable.QueryRecordWhere("ID = {0}", SeriesTypeID);
+        }
+
+        /// <summary>Queries the channel record for this series.</summary>
+        /// <param name="connection">Connection to query; a null connection yields null.</param>
+        public Channel GetChannel(AdoDataConnection connection)
+        {
+            if (connection is null)
+                return null;
+
+            TableOperations<Channel> channelTable = new TableOperations<Channel>(connection);
+            return channelTable.QueryRecordWhere("ID = {0}", ChannelID);
+        }
+
+        // Opens a connection via the factory, queries the series type,
+        // and caches the result in the lazy context.
+        private SeriesType QuerySeriesType()
+        {
+            SeriesType seriesType;
+
+            using (AdoDataConnection connection = ConnectionFactory?.Invoke())
+            {
+                seriesType = GetSeriesType(connection);
+            }
+
+            return LazyContext.GetSeriesType(seriesType);
+        }
+
+        // Opens a connection via the factory, queries the channel, propagates
+        // this record's lazy context to it, and caches it in the lazy context.
+        private Channel QueryChannel()
+        {
+            Channel channel;
+
+            using (AdoDataConnection connection = ConnectionFactory?.Invoke())
+            {
+                channel = GetChannel(connection);
+            }
+
+            if (channel is not null)
+                channel.LazyContext = LazyContext;
+
+            return LazyContext.GetChannel(channel);
+        }
+
+        #endregion
+    }
+}
diff --git a/src/Libraries/openXDA.Model/Channels/SeriesType.cs b/src/Libraries/openXDA.Model/Channels/SeriesType.cs
new file mode 100644
index 00000000..c7621b5d
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Channels/SeriesType.cs
@@ -0,0 +1,40 @@
+//******************************************************************************************************
+// SeriesType.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System.ComponentModel.DataAnnotations;
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+    /// <summary>
+    /// Model of a record in the SeriesType table; names the kind of series
+    /// (table name mapped explicitly via the attribute).
+    /// </summary>
+    [TableName("SeriesType")]
+    public class SeriesType
+    {
+        /// <summary>Auto-incrementing primary key.</summary>
+        [PrimaryKey(true)]
+        public int ID { get; set; }
+
+        /// <summary>Series type name; limited to 200 characters.</summary>
+        [StringLength(200)]
+        public string Name { get; set; }
+
+        /// <summary>Free-form description of the series type.</summary>
+        public string Description { get; set; }
+    }
+}
\ No newline at end of file
diff --git a/src/Libraries/openXDA.Model/Events/BreakerRestrike.cs b/src/Libraries/openXDA.Model/Events/BreakerRestrike.cs
new file mode 100644
index 00000000..e76661d5
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Events/BreakerRestrike.cs
@@ -0,0 +1,65 @@
+//******************************************************************************************************
+// BreakerRestrike.cs - Gbtc
+//
+// Copyright © 2019, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may not use this
+// file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/30/2019 - Stephen C. Wills
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System.Data;
+using Gemstone.Data;
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+    /// <summary>
+    /// Model of a record in the BreakerRestrike table; stores sample indexes,
+    /// timestamps, and measured values for the stages of a breaker restrike
+    /// detected during an event.
+    /// </summary>
+    public class BreakerRestrike
+    {
+        /// <summary>Auto-incrementing primary key.</summary>
+        [PrimaryKey(true)]
+        public int ID { get; set; }
+
+        /// <summary>Foreign key to the Event record.</summary>
+        public int EventID { get; set; }
+
+        /// <summary>Foreign key to the Phase record.</summary>
+        public int PhaseID { get; set; }
+
+        // Initial current extinguish: sample index, timestamp, and voltage.
+        public int InitialExtinguishSample { get; set; }
+
+        [FieldDataType(DbType.DateTime2, DatabaseType.SQLServer)]
+        public DateTime InitialExtinguishTime { get; set; }
+        public double InitialExtinguishVoltage { get; set; }
+        public int RestrikeSample { get; set; }
+
+        // Restrike instant: timestamp and measured electrical quantities.
+        [FieldDataType(DbType.DateTime2, DatabaseType.SQLServer)]
+        public DateTime RestrikeTime { get; set; }
+        public double RestrikeVoltage { get; set; }
+        public double RestrikeCurrentPeak { get; set; }
+        public double RestrikeVoltageDip { get; set; }
+        public int TransientPeakSample { get; set; }
+
+        // Transient peak following the restrike.
+        [FieldDataType(DbType.DateTime2, DatabaseType.SQLServer)]
+        public DateTime TransientPeakTime { get; set; }
+        public double TransientPeakVoltage { get; set; }
+        public double PerUnitTransientPeakVoltage { get; set; }
+        public int FinalExtinguishSample { get; set; }
+
+        // Final extinguish and accumulated I2t over the restrike.
+        [FieldDataType(DbType.DateTime2, DatabaseType.SQLServer)]
+        public DateTime FinalExtinguishTime { get; set; }
+        public double FinalExtinguishVoltage { get; set; }
+        public double I2t { get; set; }
+
+    }
+}
diff --git a/src/Libraries/openXDA.Model/Events/Disturbances/Disturbance.cs b/src/Libraries/openXDA.Model/Events/Disturbances/Disturbance.cs
new file mode 100644
index 00000000..c83ce0e6
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Events/Disturbances/Disturbance.cs
@@ -0,0 +1,67 @@
+//******************************************************************************************************
+// Disturbance.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System;
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+    /// <summary>
+    /// Model of a record in the Disturbance table; describes a single
+    /// disturbance measured during an event, including its magnitude,
+    /// time span, and sample-index range.
+    /// </summary>
+    public class Disturbance
+    {
+        /// <summary>Auto-incrementing primary key.</summary>
+        [PrimaryKey(true)]
+        public int ID { get; set; }
+        public int EventID { get; set; }
+        public int EventTypeID { get; set; }
+        public int PhaseID { get; set; }
+        public double Magnitude { get; set; }
+        public double PerUnitMagnitude { get; set; }
+
+        // Stored as DATETIME2 on SQL Server for sub-millisecond precision.
+        [FieldDataType(System.Data.DbType.DateTime2, Gemstone.Data.DatabaseType.SQLServer)]
+        public DateTime StartTime { get; set; }
+
+        [FieldDataType(System.Data.DbType.DateTime2, Gemstone.Data.DatabaseType.SQLServer)]
+        public DateTime EndTime { get; set; }
+
+        // Duration expressed both in seconds and in power-system cycles,
+        // plus the start/end sample indexes within the event data.
+        public double DurationSeconds { get; set; }
+        public double DurationCycles { get; set; }
+        public int StartIndex { get; set; }
+        public int EndIndex { get; set; }
+        public string UpdatedBy { get; set; }
+    }
+
+    /// <summary>
+    /// Disturbance joined with meter/line information, mapped to the
+    /// DisturbanceView database view.
+    /// </summary>
+    [TableName("DisturbanceView")]
+    public class DisturbanceView: Disturbance
+    {
+        public int MeterID { get; set; }
+        public int LineID { get; set; }
+        public int? SeverityCode { get; set; }
+        public string MeterName { get; set; }
+        public string PhaseName { get; set; }
+    }
+
+    /// <summary>Alias for <see cref="DisturbanceView"/> mapped to the same view; presumably used for per-day queries — confirm against callers.</summary>
+    [TableName("DisturbanceView")]
+    public class DisturbancesForDay : DisturbanceView { }
+
+    /// <summary>Alias for <see cref="DisturbanceView"/> mapped to the same view; presumably used for per-meter queries — confirm against callers.</summary>
+    [TableName("DisturbanceView")]
+    public class DisturbancesForMeter : DisturbanceView { }
+}
\ No newline at end of file
diff --git a/src/Libraries/openXDA.Model/Events/Event.cs b/src/Libraries/openXDA.Model/Events/Event.cs
new file mode 100644
index 00000000..992ad47e
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Events/Event.cs
@@ -0,0 +1,91 @@
+//******************************************************************************************************
+// Event.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System.Data;
+using Gemstone.Data;
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+    /// <summary>
+    /// Model of a record in the Event table; describes a recorded event
+    /// including its source file group, meter, asset, time span, and
+    /// sampling characteristics.
+    /// </summary>
+    [TableName("Event")]
+    public class Event
+    {
+        /// <summary>Auto-incrementing primary key.</summary>
+        [PrimaryKey(true)]
+        public int ID { get; set; }
+
+        /// <summary>Foreign key to the FileGroup the event was parsed from.</summary>
+        public int FileGroupID { get; set; }
+
+        /// <summary>Foreign key to the Meter that recorded the event.</summary>
+        public int MeterID { get; set; }
+
+        /// <summary>Foreign key to the Asset the event is associated with.</summary>
+        public int AssetID { get; set; }
+
+        /// <summary>Foreign key to the EventType record.</summary>
+        public int EventTypeID { get; set; }
+
+        /// <summary>Foreign key to the EventData record; null when no waveform data is stored.</summary>
+        public int? EventDataID { get; set; }
+
+        public string Name { get; set; }
+
+        public string Alias { get; set; }
+
+        public string ShortName { get; set; }
+
+        // Stored as DATETIME2 on SQL Server for sub-millisecond precision.
+        [FieldDataType(DbType.DateTime2, DatabaseType.SQLServer)]
+        public DateTime StartTime { get; set; }
+
+        [FieldDataType(DbType.DateTime2, DatabaseType.SQLServer)]
+        public DateTime EndTime { get; set; }
+
+        /// <summary>Total number of samples in the event.</summary>
+        public int Samples { get; set; }
+
+        public int TimeZoneOffset { get; set; }
+
+        public int SamplesPerSecond { get; set; }
+
+        public int SamplesPerCycle { get; set; }
+
+        public string Description { get; set; }
+
+        public int FileVersion { get; set; }
+
+        public string UpdatedBy { get; set; }
+    }
+
+    /// <summary>
+    /// Event joined with asset/meter/station/type names, mapped to the
+    /// EventView database view.
+    /// </summary>
+    [TableName("EventView")]
+    public class EventView : Event
+    {
+        // Re-declared (hiding the base property) so the primary-key
+        // attribute is applied on this type's own property.
+        [PrimaryKey(true)]
+        public new int ID
+        {
+            get => base.ID;
+            set => base.ID = value;
+        }
+
+        public string AssetName { get; set; }
+
+        public string MeterName { get; set; }
+
+        public string StationName { get; set; }
+
+        public string EventTypeName { get; set; }
+    }
+}
\ No newline at end of file
diff --git a/src/Libraries/openXDA.Model/Events/EventStat.cs b/src/Libraries/openXDA.Model/Events/EventStat.cs
new file mode 100644
index 00000000..6e004a3e
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Events/EventStat.cs
@@ -0,0 +1,57 @@
+//******************************************************************************************************
+// EventStat.cs - Gbtc
+//
+// Copyright © 2018, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may not use this
+// file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 11/07/2018 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+    /// <summary>
+    /// Model of a record in the EventStat table; per-event extrema and
+    /// aggregates. Naming suggests V* are voltage and I* are current
+    /// quantities per phase/phase-pair — confirm against the computation code.
+    /// All values are nullable since a statistic may not be computed.
+    /// </summary>
+    public class EventStat
+    {
+        /// <summary>Auto-incrementing primary key.</summary>
+        [PrimaryKey(true)]
+        public int ID { get; set; }
+        public int EventID { get; set; }
+        public double? VPeak { get; set; }
+        public double? VAMax { get; set; }
+        public double? VBMax { get; set; }
+        public double? VCMax { get; set; }
+        public double? VABMax { get; set; }
+        public double? VBCMax { get; set; }
+        public double? VCAMax { get; set; }
+        public double? VAMin { get; set; }
+        public double? VBMin { get; set; }
+        public double? VCMin { get; set; }
+        public double? VABMin { get; set; }
+        public double? VBCMin { get; set; }
+        public double? VCAMin { get; set; }
+        public double? IPeak { get; set; }
+        public double? IAMax { get; set; }
+        public double? IBMax { get; set; }
+        public double? ICMax { get; set; }
+        public double? IA2t { get; set; }
+        public double? IB2t { get; set; }
+        public double? IC2t { get; set; }
+        public double? InitialMW { get; set; }
+        public double? FinalMW { get; set; }
+        public int? PQViewID { get; set; }
+    }
+}
diff --git a/src/Libraries/openXDA.Model/Events/Faults/Fault.cs b/src/Libraries/openXDA.Model/Events/Faults/Fault.cs
new file mode 100644
index 00000000..dbf8b955
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Events/Faults/Fault.cs
@@ -0,0 +1,137 @@
+//******************************************************************************************************
+// Fault.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System;
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+    /// <summary>
+    /// Model of a record in the FaultSummary table; one fault-location
+    /// result for an event, produced by a specific algorithm.
+    /// </summary>
+    [TableName("FaultSummary")]
+    public class Fault
+    {
+        /// <summary>Auto-incrementing primary key.</summary>
+        [PrimaryKey(true)]
+        public int ID { get; set; }
+
+        /// <summary>Foreign key to the Event record.</summary>
+        public int EventID { get; set; }
+
+        /// <summary>Name of the fault location algorithm that produced this result.</summary>
+        public string Algorithm { get; set; }
+
+        /// <summary>Ordinal of the fault within the event.</summary>
+        public int FaultNumber { get; set; }
+
+        public int CalculationCycle { get; set; }
+
+        /// <summary>Computed distance to the fault.</summary>
+        public double Distance { get; set; }
+
+        public int PathNumber { get; set; }
+
+        public int LineSegmentID { get; set; }
+
+        public double LineSegmentDistance { get; set; }
+
+        public double CurrentMagnitude { get; set; }
+
+        public double CurrentLag { get; set; }
+
+        public double PrefaultCurrent { get; set; }
+
+        public double PostfaultCurrent { get; set; }
+
+        public double ReactanceRatio { get; set; }
+
+        // Stored as DATETIME2 on SQL Server for sub-millisecond precision.
+        [FieldDataType(System.Data.DbType.DateTime2, Gemstone.Data.DatabaseType.SQLServer)]
+        public DateTime Inception { get; set; }
+
+        public double DurationSeconds { get; set; }
+
+        public double DurationCycles { get; set; }
+
+        public string FaultType { get; set; }
+
+        // Flags controlling which result is presented and whether it is trusted.
+        public bool IsSelectedAlgorithm { get; set; }
+
+        public bool IsValid { get; set; }
+
+        public bool IsSuppressed { get; set; }
+    }
+
+ public class FaultSummary : Fault { }
+
+    /// <summary>
+    /// Fault joined with meter/location/line information, mapped to the
+    /// FaultView database view.
+    /// </summary>
+    [TableName("FaultView")]
+    public class FaultView : Fault
+    {
+        public string MeterName { get; set; }
+
+        public string ShortName { get; set; }
+
+        public string LocationName { get; set; }
+
+        public int MeterID { get; set; }
+
+        public int LineID { get; set; }
+
+        public string LineName { get; set; }
+
+        public int Voltage { get; set; }
+
+        public DateTime InceptionTime { get; set; }
+
+        public double CurrentDistance { get; set; }
+
+        // Presumably a rank/row-number column from the view — confirm against the view definition.
+        public int RK { get; set; }
+    }
+
+    /// <summary>Alias for <see cref="FaultView"/> mapped to the same view; presumably used for per-meter queries — confirm against callers.</summary>
+    [TableName("FaultView")]
+    public class FaultForMeter: FaultView { }
+
+    /// <summary>
+    /// Result-set model for a fault-details-by-date query; the lowercase
+    /// property names presumably mirror the column aliases of a stored
+    /// procedure or ad-hoc query — confirm before renaming.
+    /// </summary>
+    public class FaultsDetailsByDate
+    {
+        public int thefaultid { get; set; }
+
+        public string thesite { get; set; }
+
+        public string locationname { get; set; }
+
+        public int themeterid { get; set; }
+
+        public int thelineid { get; set; }
+
+        public int theeventid { get; set; }
+
+        public string thelinename { get; set; }
+
+        public int voltage { get; set; }
+
+        public string theinceptiontime { get; set; }
+
+        public string thefaulttype { get; set; }
+
+        public double thecurrentdistance { get; set; }
+
+        public int notecount { get; set; }
+
+        public int rk { get; set; }
+
+        // Populated in application code rather than from the query result.
+        [NonRecordField]
+        public string theeventtype { get; set; }
+    }
+}
\ No newline at end of file
diff --git a/src/Libraries/openXDA.Model/Events/Faults/FaultCurve.cs b/src/Libraries/openXDA.Model/Events/Faults/FaultCurve.cs
new file mode 100644
index 00000000..02268fa2
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Events/Faults/FaultCurve.cs
@@ -0,0 +1,250 @@
+//******************************************************************************************************
+// FaultCurve.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 09/06/2017 - Stephen C. Wills
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System.ComponentModel.DataAnnotations;
+using Gemstone;
+using Gemstone.Data.Model;
+using Ionic.Zlib;
+
+namespace openXDA.Model
+{
+    /// <summary>
+    /// Model of a record in the FaultCurve table; stores compressed curve data
+    /// (magnitude and angle blobs) produced by a fault location algorithm for
+    /// an event, and supports shifting the curve's timestamps.
+    /// </summary>
+    public class FaultCurve
+    {
+        /// <summary>Auto-incrementing primary key.</summary>
+        [PrimaryKey(true)]
+        public int ID { get; set; }
+
+        /// <summary>Foreign key to the Event record.</summary>
+        public int EventID { get; set; }
+
+        public int PathNumber { get; set; }
+
+        /// <summary>Name of the algorithm that produced the curve; limited to 200 characters.</summary>
+        [StringLength(200)]
+        public string Algorithm { get; set; }
+
+        /// <summary>Compressed blob of curve data.</summary>
+        public byte[] Data { get; set; }
+
+        /// <summary>Compressed blob of curve angle data.</summary>
+        public byte[] AngleData { get; set; }
+
+        #region [Private Class]
+
+        // One curve sample: timestamp plus value.
+        private class DataPoint
+        {
+            public DateTime Time;
+            public double Value;
+        }
+
+        #endregion
+
+        #region [Methods]
+
+        /// <summary>
+        /// Shifts the timestamps in both blobs by the given number of ticks,
+        /// first migrating any legacy GZip-compressed blob to the current format.
+        /// </summary>
+        public void Adjust(Ticks ticks)
+        {
+            // If the blob contains the GZip header,
+            // move from Legacy Compression to normal Compression
+            if (this.Data[0] == 0x1F && this.Data[1] == 0x8B)
+            {
+                this.Data = MigrateCompression(this.Data);
+            }
+
+            // If the blob contains the GZip header,
+            // move from Legacy Compression to normal Compression
+            if (this.AngleData[0] == 0x1F && this.AngleData[1] == 0x8B)
+            {
+                this.AngleData = MigrateCompression(this.AngleData);
+            }
+
+            this.Data = ChangeTS(this.Data, ticks);
+            this.AngleData = ChangeTS(this.AngleData, ticks);
+        }
+
+        // Decompresses the blob, shifts its start timestamp by the given
+        // ticks, recompresses it, and restamps the custom 0x44 0x33 header.
+        private static byte[] ChangeTS(byte[] data, Ticks ticks)
+        {
+            // Restore the GZip magic bytes that the custom header replaced.
+            data[0] = 0x1F;
+            data[1] = 0x8B;
+
+            byte[] uncompressedData = GZipStream.UncompressBuffer(data);
+            byte[] resultData = new byte[uncompressedData.Length];
+
+            uncompressedData.CopyTo(resultData, 0);
+
+            int offset = 0;
+
+            int m_samples = LittleEndian.ToInt32(uncompressedData, offset);
+            offset += sizeof(int);
+
+            int timeValues = LittleEndian.ToInt32(uncompressedData, offset);
+
+            // NOTE(review): startTS is captured before advancing past this int,
+            // so the shifted ticks are written 4 bytes earlier than the offset
+            // the original timestamp is read from below — confirm the blob
+            // layout intends this overlap.
+            int startTS = offset;
+
+            offset += sizeof(int);
+
+            long currentValue = LittleEndian.ToInt64(uncompressedData, offset);
+
+            DateTime startTime = new DateTime(currentValue);
+            startTime = startTime.AddTicks(ticks);
+
+            LittleEndian.CopyBytes(startTime.Ticks, resultData, startTS);
+
+            resultData = GZipStream.CompressBuffer(resultData);
+
+            // Replace the GZip magic bytes with the custom format header.
+            resultData[0] = 0x44;
+            resultData[1] = 0x33;
+            return resultData;
+        }
+
+        // Converts a legacy GZip blob (absolute long timestamps and double
+        // values) to the current format: block-delta-encoded timestamps and
+        // values scaled into the ushort range, under the 0x44 0x33 header.
+        private static byte[] MigrateCompression(byte[] data)
+        {
+            byte[] uncompressedData;
+            int offset;
+            DateTime[] times;
+            List<DataPoint> series;
+            int seriesID = 0;
+
+            uncompressedData = GZipStream.UncompressBuffer(data);
+            offset = 0;
+
+            int m_samples = LittleEndian.ToInt32(uncompressedData, offset);
+            offset += sizeof(int);
+
+            times = new DateTime[m_samples];
+
+            for (int i = 0; i < m_samples; i++)
+            {
+                times[i] = new DateTime(LittleEndian.ToInt64(uncompressedData, offset));
+                offset += sizeof(long);
+            }
+
+            series = new List<DataPoint>();
+
+            while (offset < uncompressedData.Length)
+            {
+                seriesID = LittleEndian.ToInt32(uncompressedData, offset);
+                offset += sizeof(int);
+
+                for (int i = 0; i < m_samples; i++)
+                {
+                    series.Add(new DataPoint()
+                    {
+                        Time = times[i],
+                        Value = LittleEndian.ToDouble(uncompressedData, offset)
+                    });
+
+                    offset += sizeof(double);
+                }
+            }
+
+            // Delta-encode timestamps; diffs that fit in a ushort are marked compressed.
+            var timeSeries = series.Select(dataPoint => new { Time = dataPoint.Time.Ticks, Compressed = false }).ToList();
+
+            for (int i = 1; i < timeSeries.Count; i++)
+            {
+                long previousTimestamp = series[i - 1].Time.Ticks;
+                long timestamp = timeSeries[i].Time;
+                long diff = timestamp - previousTimestamp;
+
+                if (diff >= 0 && diff <= ushort.MaxValue)
+                    timeSeries[i] = new { Time = diff, Compressed = true };
+            }
+
+            int timeSeriesByteLength = timeSeries.Sum(obj => obj.Compressed ? sizeof(ushort) : sizeof(int) + sizeof(long));
+            int dataSeriesByteLength = sizeof(int) + (2 * sizeof(double)) + (m_samples * sizeof(ushort));
+            int totalByteLength = sizeof(int) + timeSeriesByteLength + dataSeriesByteLength;
+
+            byte[] result = new byte[totalByteLength];
+            offset = 0;
+
+            offset += LittleEndian.CopyBytes(m_samples, result, offset);
+
+            // Indexes of timestamps stored in full; each one starts a block.
+            List<int> uncompressedIndexes = timeSeries
+                .Select((obj, Index) => new { obj.Compressed, Index })
+                .Where(obj => !obj.Compressed)
+                .Select(obj => obj.Index)
+                .ToList();
+
+            for (int i = 0; i < uncompressedIndexes.Count; i++)
+            {
+                int index = uncompressedIndexes[i];
+                int nextIndex = (i + 1 < uncompressedIndexes.Count) ? uncompressedIndexes[i + 1] : timeSeries.Count;
+
+                // Block: sample count, full timestamp, then ushort diffs.
+                offset += LittleEndian.CopyBytes(nextIndex - index, result, offset);
+                offset += LittleEndian.CopyBytes(timeSeries[index].Time, result, offset);
+
+                for (int j = index + 1; j < nextIndex; j++)
+                    offset += LittleEndian.CopyBytes((ushort)timeSeries[j].Time, result, offset);
+            }
+
+            // Scale values into the ushort range; ushort.MaxValue is reserved for NaN.
+            const ushort NaNValue = ushort.MaxValue;
+            const ushort MaxCompressedValue = ushort.MaxValue - 1;
+            double range = series.Select(item => item.Value).Max() - series.Select(item => item.Value).Min();
+            double decompressionOffset = series.Select(item => item.Value).Min();
+            double decompressionScale = range / MaxCompressedValue;
+            double compressionScale = (decompressionScale != 0.0D) ? 1.0D / decompressionScale : 0.0D;
+
+            offset += LittleEndian.CopyBytes(seriesID, result, offset);
+            offset += LittleEndian.CopyBytes(decompressionOffset, result, offset);
+            offset += LittleEndian.CopyBytes(decompressionScale, result, offset);
+
+            foreach (DataPoint dataPoint in series)
+            {
+                ushort compressedValue = (ushort)Math.Round((dataPoint.Value - decompressionOffset) * compressionScale);
+
+                if (compressedValue == NaNValue)
+                    compressedValue--;
+
+                if (double.IsNaN(dataPoint.Value))
+                    compressedValue = NaNValue;
+
+                offset += LittleEndian.CopyBytes(compressedValue, result, offset);
+            }
+
+            byte[] returnArray = GZipStream.CompressBuffer(result);
+
+            // Custom header marking the migrated compression format.
+            returnArray[0] = 0x44;
+            returnArray[1] = 0x33;
+
+            return returnArray;
+        }
+
+        #endregion
+    }
+
+    /// <summary>
+    /// Model of a record in the FaultCurveStatistic table; summary statistics
+    /// for one fault of a fault curve.
+    /// </summary>
+    public class FaultCurveStatistic
+    {
+        /// <summary>Auto-incrementing primary key.</summary>
+        [PrimaryKey(true)]
+        public int ID { get; set; }
+
+        /// <summary>Foreign key to the FaultCurve record.</summary>
+        public int FaultCurveID { get; set; }
+
+        /// <summary>Ordinal of the fault the statistics were computed over.</summary>
+        public int FaultNumber { get; set; }
+
+        public double Maximum { get; set; }
+
+        public double Minimum { get; set; }
+
+        public double Average { get; set; }
+
+        public double StandardDeviation { get; set; }
+    }
+}
diff --git a/src/Libraries/openXDA.Model/Events/RelayPerformance.cs b/src/Libraries/openXDA.Model/Events/RelayPerformance.cs
new file mode 100644
index 00000000..4950235e
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Events/RelayPerformance.cs
@@ -0,0 +1,63 @@
+//******************************************************************************************************
+// RelayPerformance.cs - Gbtc
+//
+// Copyright © 2019, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may not use this
+// file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 07/10/2019 - Christoph Lackner
+// Generated original version of source code.
+// 08/20/2021 - Christoph Lackner
+// Added additional Trip Coil Curve points.
+//
+//******************************************************************************************************
+
+using System.Data;
+using Gemstone.Data;
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+ public class RelayPerformance
+ {
+ [PrimaryKey(true)]
+ public int ID { get; set; }
+ public int EventID { get; set; }
+ public int ChannelID { get; set; }
+ public double? Imax1 { get; set; }
+ public int? Tmax1 { get; set; }
+ public double? Imax2 { get; set; }
+ public int? TplungerLatch { get; set; }
+ public double IplungerLatch { get; set; }
+ public double? Idrop { get; set; }
+ public int? TiDrop { get; set; }
+ public int? Tend { get; set; }
+
+ [FieldDataType(DbType.DateTime2, DatabaseType.SQLServer)]
+ public DateTime? TripInitiate { get; set; }
+ public int? TripTime { get; set; }
+ public int? PickupTime { get; set; }
+ public double? TripTimeCurrent { get; set;}
+ public double? PickupTimeCurrent { get; set; }
+ public double? TripCoilCondition { get; set; }
+ public int TripCoilConditionTime { get; set; }
+ public int? ExtinctionTimeA { get; set; }
+ public int? ExtinctionTimeB { get; set; }
+ public int? ExtinctionTimeC { get; set; }
+ public double? I2CA { get; set; }
+ public double? I2CB { get; set; }
+ public double? I2CC { get; set; }
+
+ }
+}
diff --git a/src/Libraries/openXDA.Model/Files/DataFile.cs b/src/Libraries/openXDA.Model/Files/DataFile.cs
new file mode 100644
index 00000000..10b9d4b5
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Files/DataFile.cs
@@ -0,0 +1,93 @@
+//******************************************************************************************************
+// DataFile.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System.Text;
+using Gemstone.Data.Model;
+using Gemstone.IO.Checksums;
+using Newtonsoft.Json;
+
+namespace openXDA.Model
+{
+ [Serializable]
+ public class DataFile
+ {
+ [PrimaryKey(true)]
+ public int ID { get; set; }
+
+ public int FileGroupID { get; set; }
+
+ public string FilePath { get; set; }
+
+ public int FilePathHash { get; set; }
+
+ public long FileSize { get; set; }
+
+ public DateTime CreationTime { get; set; }
+
+ public DateTime LastWriteTime { get; set; }
+
+ public DateTime LastAccessTime { get; set; }
+
+ [NonRecordField]
+ [JsonIgnore]
+ public FileBlob FileBlob { get; set; }
+
+ public static int GetHash(string filePath)
+ {
+ Encoding utf8 = new UTF8Encoding(false);
+ byte[] pathData = utf8.GetBytes(filePath);
+ return unchecked((int)Crc32.Compute(pathData, 0, pathData.Length));
+ }
+ }
+
+ [TableName("DataFile")]
+ public class DataFileDb : DataFile { }
+
+ public static partial class TableOperationsExtensions
+ {
+ public static DataFile QueryDataFile(this TableOperations dataFileTable, string filePath)
+ {
+ int hashCode = DataFile.GetHash(filePath);
+ DataFile dataFile = QueryDataFile(dataFileTable, filePath, hashCode);
+
+ if (dataFile != null)
+ return dataFile;
+
+ int legacyHashCode = filePath.GetHashCode();
+ dataFile = QueryDataFile(dataFileTable, filePath, legacyHashCode);
+
+ if (dataFile == null)
+ return null;
+
+ dataFile.FilePathHash = hashCode;
+ dataFileTable.UpdateRecord(dataFile);
+ return dataFile;
+ }
+
+ private static DataFile QueryDataFile(TableOperations dataFileTable, string filePath, int hashCode)
+ {
+ IEnumerable dataFiles = dataFileTable.QueryRecordsWhere("FilePathHash = {0}", hashCode);
+ return dataFiles.FirstOrDefault(dataFile => dataFile.FilePath == filePath);
+ }
+ }
+}
diff --git a/src/Libraries/openXDA.Model/Files/FileBlob.cs b/src/Libraries/openXDA.Model/Files/FileBlob.cs
new file mode 100644
index 00000000..00beeef8
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Files/FileBlob.cs
@@ -0,0 +1,38 @@
+//******************************************************************************************************
+// FileBlob.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+ [Serializable]
+ public class FileBlob
+ {
+ [PrimaryKey(true)]
+ public int ID { get; set; }
+
+ public int DataFileID { get; set; }
+
+ public byte[] Blob { get; set; }
+ }
+}
\ No newline at end of file
diff --git a/src/Libraries/openXDA.Model/Files/FileGroup.cs b/src/Libraries/openXDA.Model/Files/FileGroup.cs
new file mode 100644
index 00000000..d6c50ab2
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Files/FileGroup.cs
@@ -0,0 +1,102 @@
+//******************************************************************************************************
+// FileGroup.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 08/29/2017 - Billy Ernest
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using Gemstone.Data;
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+ [Serializable]
+ public class FileGroup
+ {
+ [PrimaryKey(true)]
+ public int ID { get; set; }
+
+ public int MeterID { get; set; }
+
+ [FieldDataType(System.Data.DbType.DateTime2, DatabaseType.SQLServer)]
+ public DateTime DataStartTime { get; set; }
+
+ [FieldDataType(System.Data.DbType.DateTime2, DatabaseType.SQLServer)]
+ public DateTime DataEndTime { get; set; }
+
+ [FieldDataType(System.Data.DbType.DateTime2, DatabaseType.SQLServer)]
+ public DateTime ProcessingStartTime { get; set; }
+
+ [FieldDataType(System.Data.DbType.DateTime2, DatabaseType.SQLServer)]
+ public DateTime ProcessingEndTime { get; set; }
+
+ public int ProcessingVersion { get; set; }
+
+ public int ProcessingStatus { get; set; }
+
+ [NonRecordField]
+ public List DataFiles { get; set; } = new List();
+
+ public void AddFieldValue(AdoDataConnection connection, string name, string value, string description = null)
+ {
+ TableOperations fileGroupFieldTable = new TableOperations(connection);
+ FileGroupField fileGroupField = fileGroupFieldTable.GetOrAdd(name, description);
+
+ TableOperations fileGroupFieldValueTable = new TableOperations(connection);
+ FileGroupFieldValue fileGroupFieldValue = new FileGroupFieldValue();
+ fileGroupFieldValue.FileGroupID = ID;
+ fileGroupFieldValue.FileGroupFieldID = fileGroupField.ID;
+ fileGroupFieldValue.Value = value;
+ fileGroupFieldValueTable.AddNewRecord(fileGroupFieldValue);
+ }
+
+ public void AddOrUpdateFieldValue(AdoDataConnection connection, string name, string value, string description = null)
+ {
+ TableOperations fileGroupFieldTable = new TableOperations(connection);
+ FileGroupField fileGroupField = fileGroupFieldTable.GetOrAdd(name, description);
+
+ TableOperations fileGroupFieldValueTable = new TableOperations(connection);
+ RecordRestriction fileGroupRestriction = new RecordRestriction("FileGroupID = {0}", ID);
+ RecordRestriction fileGroupFieldRestriction = new RecordRestriction("FileGroupFieldID = {0}", fileGroupField.ID);
+ RecordRestriction queryRestriction = fileGroupRestriction & fileGroupFieldRestriction;
+
+ FileGroupFieldValue fileGroupFieldValue = fileGroupFieldValueTable.QueryRecord(queryRestriction) ?? new FileGroupFieldValue()
+ {
+ FileGroupID = ID,
+ FileGroupFieldID = fileGroupField.ID
+ };
+
+ fileGroupFieldValue.Value = value;
+ fileGroupFieldValueTable.AddNewOrUpdateRecord(fileGroupFieldValue);
+ }
+ }
+
+ /// <summary>
+ /// Number indicating the processing status of a file group.
+ /// </summary>
+ public enum FileGroupProcessingStatus
+ {
+ Created = 0,
+ Queued = 1,
+ Processing = 2,
+ Success = 3,
+ Failed = 4,
+ PartialSuccess = 5
+ }
+}
diff --git a/src/Libraries/openXDA.Model/Files/FileGroupField.cs b/src/Libraries/openXDA.Model/Files/FileGroupField.cs
new file mode 100644
index 00000000..31021f88
--- /dev/null
+++ b/src/Libraries/openXDA.Model/Files/FileGroupField.cs
@@ -0,0 +1,61 @@
+//******************************************************************************************************
+// FileGroupField.cs - Gbtc
+//
+// Copyright © 2019, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may not use this
+// file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 06/18/2019 - Stephen C. Wills
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System;
+using System.ComponentModel.DataAnnotations;
+using Gemstone.Data.Model;
+
+namespace openXDA.Model
+{
+ public class FileGroupField
+ {
+ [PrimaryKey(true)]
+ public int ID { get; set; }
+
+ [StringLength(200)]
+ public string Name { get; set; }
+
+ public string Description { get; set; }
+ }
+
+ public static partial class TableOperationsExtensions
+ {
+ public static FileGroupField GetOrAdd(this TableOperations fileGroupFieldTable, string name, string description = null)
+ {
+ FileGroupField fileGroupField = fileGroupFieldTable.QueryRecordWhere("Name = {0}", name);
+
+ if ((object)fileGroupField == null)
+ {
+ fileGroupField = new FileGroupField();
+ fileGroupField.Name = name;
+ fileGroupField.Description = description;
+
+ fileGroupFieldTable.AddNewRecord(fileGroupField);
+
+ fileGroupField.ID = fileGroupFieldTable.Connection.ExecuteScalar("SELECT @@IDENTITY");
+ }
+
+ return fileGroupField;
+ }
+ }
+}
diff --git a/src/OpenSEE/App_Start/FilterConfig.cs b/src/Libraries/openXDA.Model/Files/FileGroupFieldValue.cs
similarity index 73%
rename from src/OpenSEE/App_Start/FilterConfig.cs
rename to src/Libraries/openXDA.Model/Files/FileGroupFieldValue.cs
index 97498ed4..55140bae 100644
--- a/src/OpenSEE/App_Start/FilterConfig.cs
+++ b/src/Libraries/openXDA.Model/Files/FileGroupFieldValue.cs
@@ -1,7 +1,7 @@
//******************************************************************************************************
-// FilterConfig.cs - Gbtc
+// FileGroupFieldValue.cs - Gbtc
//
-// Copyright © 2020, Grid Protection Alliance. All Rights Reserved.
+// Copyright © 2019, Grid Protection Alliance. All Rights Reserved.
//
// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
// the NOTICE file distributed with this work for additional information regarding copyright ownership.
@@ -16,21 +16,24 @@
//
// Code Modification History:
// ----------------------------------------------------------------------------------------------------
-// 02/19/2020 - Billy Ernest
+// 06/18/2019 - Stephen C. Wills
// Generated original version of source code.
//
//******************************************************************************************************
-using System.Web;
-using System.Web.Mvc;
+using Gemstone.Data.Model;
-namespace OpenSEE
+namespace openXDA.Model
{
- public class FilterConfig
+ public class FileGroupFieldValue
{
- public static void RegisterGlobalFilters(GlobalFilterCollection filters)
- {
- filters.Add(new HandleErrorAttribute());
- }
+ [PrimaryKey(true)]
+ public int ID { get; set; }
+
+ public int FileGroupID { get; set; }
+
+ public int FileGroupFieldID { get; set; }
+
+ public string Value { get; set; }
}
}
diff --git a/src/Libraries/openXDA.Model/LazyContext.cs b/src/Libraries/openXDA.Model/LazyContext.cs
new file mode 100644
index 00000000..6f5c7acc
--- /dev/null
+++ b/src/Libraries/openXDA.Model/LazyContext.cs
@@ -0,0 +1,369 @@
+//******************************************************************************************************
+// LazyContext.cs - Gbtc
+//
+// Copyright © 2017, Grid Protection Alliance. All Rights Reserved.
+//
+// Licensed to the Grid Protection Alliance (GPA) under one or more contributor license agreements. See
+// the NOTICE file distributed with this work for additional information regarding copyright ownership.
+// The GPA licenses this file to you under the MIT License (MIT), the "License"; you may
+// not use this file except in compliance with the License. You may obtain a copy of the License at:
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless agreed to in writing, the subject software distributed under the License is distributed on an
+// "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Refer to the
+// License for the specific language governing permissions and limitations.
+//
+// Code Modification History:
+// ----------------------------------------------------------------------------------------------------
+// 09/04/2017 - Stephen C. Wills
+// Generated original version of source code.
+//
+//******************************************************************************************************
+
+using System;
+using System.Collections.Generic;
+using Gemstone.Data;
+
+namespace openXDA.Model
+{
+ internal class LazyContext
+ {
+ #region [ Members ]
+
+ // Fields
+ private Dictionary m_locations;
+ private Dictionary m_meters;
+ private Dictionary