From b73a0c85c4367527b70aef9ee5a7372229b0661d Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Wed, 28 Jun 2023 11:10:23 +0200 Subject: [PATCH 01/26] proposed --- .config/dotnet-tools.json | 6 + Extractor/Pushers/CDFPusher.cs | 1282 ++++++++++++++++++++++---------- 2 files changed, 898 insertions(+), 390 deletions(-) diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json index 43ec7758..49149206 100644 --- a/.config/dotnet-tools.json +++ b/.config/dotnet-tools.json @@ -7,6 +7,12 @@ "commands": [ "sbom-tool" ] + }, + "paket": { + "version": "7.2.1", + "commands": [ + "paket" + ] } } } \ No newline at end of file diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index de16473e..4dbd2f51 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -33,7 +33,6 @@ You should have received a copy of the GNU General Public License using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; -using System.Net.Http; using System.Text.Json; using System.Threading; using System.Threading.Tasks; @@ -58,13 +57,21 @@ public sealed class CDFPusher : IPusher public PusherInput? PendingNodes { get; set; } private UAExtractor extractor; - public UAExtractor Extractor { get => extractor; set { - extractor = value; - if (fdmDestination != null) + private bool pushCleanAssets; + private bool pushCleanTimeseries; + + public UAExtractor Extractor + { + get => extractor; + set { - fdmDestination.Extractor = value; + extractor = value; + if (fdmDestination != null) + { + fdmDestination.Extractor = value; + } } - } } + } public IPusherConfig BaseConfig { get; } private readonly HashSet mismatchedTimeseries = new HashSet(); @@ -79,7 +86,8 @@ public CDFPusher( FullConfig fullConfig, CognitePusherConfig config, CogniteDestination destination, - IServiceProvider provider) + IServiceProvider provider + ) { extractor = null!; this.log = log; @@ -87,40 +95,77 @@ public CDFPusher( BaseConfig = config; this.destination = destination; this.fullConfig = fullConfig; - if (config.BrowseCallback != null && (config.BrowseCallback.Id.HasValue || !string.IsNullOrEmpty(config.BrowseCallback.ExternalId))) + if ( + config.BrowseCallback != null + && ( + config.BrowseCallback.Id.HasValue + || !string.IsNullOrEmpty(config.BrowseCallback.ExternalId) + ) + ) { callback = new BrowseCallback(destination, config.BrowseCallback, log); } if (config.FlexibleDataModels != null && config.FlexibleDataModels.Enabled) { - fdmDestination = new FDMWriter(provider.GetRequiredService(), destination, - provider.GetRequiredService>()); + fdmDestination = new FDMWriter( + provider.GetRequiredService(), + destination, + provider.GetRequiredService>() + ); } + + pushCleanAssets = + string.IsNullOrWhiteSpace(config.RawMetadata?.Database) + && string.IsNullOrWhiteSpace(config.RawMetadata?.AssetsTable); + + pushCleanTimeseries = + string.IsNullOrWhiteSpace(config.RawMetadata?.Database) + && string.IsNullOrWhiteSpace(config.RawMetadata?.TimeseriesTable); } - private static readonly Counter dataPointsCounter = Metrics - .CreateCounter("opcua_datapoints_pushed_cdf", "Number of datapoints pushed to CDF"); - private static readonly Counter dataPointPushes = Metrics - .CreateCounter("opcua_datapoint_pushes_cdf", "Number of times datapoints have been pushed to CDF"); - private static readonly Counter dataPointPushFailures = Metrics - .CreateCounter("opcua_datapoint_push_failures_cdf", "Number of completely failed pushes of datapoints to CDF"); - private static readonly 
Counter eventCounter = Metrics - .CreateCounter("opcua_events_pushed_cdf", "Number of events pushed to CDF"); - private static readonly Counter eventPushCounter = Metrics - .CreateCounter("opcua_event_pushes_cdf", "Number of times events have been pushed to CDF"); - private static readonly Counter eventPushFailures = Metrics - .CreateCounter("opcua_event_push_failures_cdf", "Number of times events have been pushed to CDF"); - private static readonly Counter nodeEnsuringFailures = Metrics - .CreateCounter("opcua_node_ensure_failures_cdf", - "Number of completely failed requests to CDF when ensuring assets/timeseries exist"); - private static readonly Counter skippedEvents = Metrics - .CreateCounter("opcua_skipped_events_cdf", "Number of events skipped by CDF pusher"); - private static readonly Gauge missingTimeseriesCnt = Metrics - .CreateGauge("opcua_missing_timeseries", "Number of distinct timeseries that have been found to be missing in CDF"); - private static readonly Gauge mismatchedTimeseriesCnt = Metrics - .CreateGauge("opcua_mismatched_timeseries", "Number of distinct timeseries that have been found to have different types in OPC-UA and in CDF"); + private static readonly Counter dataPointsCounter = Metrics.CreateCounter( + "opcua_datapoints_pushed_cdf", + "Number of datapoints pushed to CDF" + ); + private static readonly Counter dataPointPushes = Metrics.CreateCounter( + "opcua_datapoint_pushes_cdf", + "Number of times datapoints have been pushed to CDF" + ); + private static readonly Counter dataPointPushFailures = Metrics.CreateCounter( + "opcua_datapoint_push_failures_cdf", + "Number of completely failed pushes of datapoints to CDF" + ); + private static readonly Counter eventCounter = Metrics.CreateCounter( + "opcua_events_pushed_cdf", + "Number of events pushed to CDF" + ); + private static readonly Counter eventPushCounter = Metrics.CreateCounter( + "opcua_event_pushes_cdf", + "Number of times events have been pushed to CDF" + ); + private static readonly Counter eventPushFailures = Metrics.CreateCounter( + "opcua_event_push_failures_cdf", + "Number of times events have been pushed to CDF" + ); + private static readonly Counter nodeEnsuringFailures = Metrics.CreateCounter( + "opcua_node_ensure_failures_cdf", + "Number of completely failed requests to CDF when ensuring assets/timeseries exist" + ); + private static readonly Counter skippedEvents = Metrics.CreateCounter( + "opcua_skipped_events_cdf", + "Number of events skipped by CDF pusher" + ); + private static readonly Gauge missingTimeseriesCnt = Metrics.CreateGauge( + "opcua_missing_timeseries", + "Number of distinct timeseries that have been found to be missing in CDF" + ); + private static readonly Gauge mismatchedTimeseriesCnt = Metrics.CreateGauge( + "opcua_mismatched_timeseries", + "Number of distinct timeseries that have been found to have different types in OPC-UA and in CDF" + ); private readonly ILogger log; + #region Interface @@ -128,45 +173,72 @@ public CDFPusher( /// Attempts to push the given list of datapoints to CDF. /// ' /// True if push succeeded, false if it failed, null if there were no points to push. 
- public async Task PushDataPoints(IEnumerable points, CancellationToken token) + public async Task PushDataPoints( + IEnumerable points, + CancellationToken token + ) { - if (points == null) return null; + if (points == null) + return null; Dictionary> dataPointList = points .Where(dp => dp.Timestamp > DateTime.UnixEpoch) .GroupBy(dp => dp.Id) - .Where(group => !mismatchedTimeseries.Contains(group.Key) - && !missingTimeseries.Contains(group.Key)) + .Where( + group => + !mismatchedTimeseries.Contains(group.Key) + && !missingTimeseries.Contains(group.Key) + ) .ToDictionary(group => group.Key, group => group.ToList()); int count = dataPointList.Aggregate(0, (seed, points) => seed + points.Value.Count); - if (count == 0) return null; + if (count == 0) + return null; - var inserts = dataPointList.ToDictionary(kvp => - Identity.Create(kvp.Key), - kvp => kvp.Value.Select( - dp => dp.IsString ? new Datapoint(dp.Timestamp, dp.StringValue) : new Datapoint(dp.Timestamp, dp.DoubleValue.Value)) - ); + var inserts = dataPointList.ToDictionary( + kvp => Identity.Create(kvp.Key), + kvp => + kvp.Value.Select( + dp => + dp.IsString + ? new Datapoint(dp.Timestamp, dp.StringValue) + : new Datapoint(dp.Timestamp, dp.DoubleValue.Value) + ) + ); if (fullConfig.DryRun) { - log.LogInformation("Dry run enabled. Would insert {Count} datapoints over {C2} timeseries to CDF", count, inserts.Count); + log.LogInformation( + "Dry run enabled. Would insert {Count} datapoints over {C2} timeseries to CDF", + count, + inserts.Count + ); return null; } try { - var result = await destination.InsertDataPointsAsync(inserts, SanitationMode.Clean, RetryMode.OnError, token); + var result = await destination.InsertDataPointsAsync( + inserts, + SanitationMode.Clean, + RetryMode.OnError, + token + ); int realCount = count; log.LogResult(result, RequestType.CreateDatapoints, false, LogLevel.Debug); if (result.Errors != null) { - var missing = result.Errors.FirstOrDefault(err => err.Type == ErrorType.ItemMissing); + var missing = result.Errors.FirstOrDefault( + err => err.Type == ErrorType.ItemMissing + ); if (missing?.Skipped != null) { - log.LogError("Failed to push datapoints to CDF, missing ids: {Ids}", missing.Skipped.Select(ms => ms.Id)); + log.LogError( + "Failed to push datapoints to CDF, missing ids: {Ids}", + missing.Skipped.Select(ms => ms.Id) + ); foreach (var skipped in missing.Skipped) { missingTimeseries.Add(skipped.Id.ExternalId); @@ -174,10 +246,15 @@ public CDFPusher( missingTimeseriesCnt.Set(missing.Skipped.Count()); } - var mismatched = result.Errors.FirstOrDefault(err => err.Type == ErrorType.MismatchedType); + var mismatched = result.Errors.FirstOrDefault( + err => err.Type == ErrorType.MismatchedType + ); if (mismatched?.Skipped != null) { - log.LogError("Failed to push datapoints to CDF, mismatched timeseries: {Ids}", mismatched.Skipped.Select(ms => ms.Id)); + log.LogError( + "Failed to push datapoints to CDF, mismatched timeseries: {Ids}", + mismatched.Skipped.Select(ms => ms.Id) + ); foreach (var skipped in mismatched.Skipped) { mismatchedTimeseries.Add(skipped.Id.ExternalId); @@ -197,15 +274,18 @@ public CDFPusher( } } - - result.ThrowOnFatal(); - log.LogDebug("Successfully pushed {Real} / {Total} points to CDF", realCount, count); + log.LogDebug( + "Successfully pushed {Real} / {Total} points to CDF", + realCount, + count + ); dataPointPushes.Inc(); dataPointsCounter.Inc(realCount); - if (realCount == 0) return null; + if (realCount == 0) + return null; } catch (Exception e) { @@ -217,18 +297,23 @@ public 
CDFPusher( return true; } + /// /// Attempts to push the given list of events to CDF. /// /// True if push succeeded, false if it failed, null if there were no events to push. public async Task PushEvents(IEnumerable events, CancellationToken token) { - if (events == null) return null; + if (events == null) + return null; var eventList = new List(); int count = 0; foreach (var buffEvent in events) { - if (buffEvent.Time < PusherUtils.CogniteMinTime || buffEvent.Time > PusherUtils.CogniteMaxTime) + if ( + buffEvent.Time < PusherUtils.CogniteMinTime + || buffEvent.Time > PusherUtils.CogniteMaxTime + ) { skippedEvents.Inc(); continue; @@ -237,7 +322,8 @@ public CDFPusher( count++; } - if (count == 0) return null; + if (count == 0) + return null; if (fullConfig.DryRun) { @@ -247,24 +333,40 @@ public CDFPusher( try { - var result = await destination.EnsureEventsExistsAsync(eventList - .Select(evt => evt.ToCDFEvent(Extractor, config.DataSet?.Id, nodeToAssetIds)) - .Where(evt => evt != null), RetryMode.OnError, SanitationMode.Clean, token); + var result = await destination.EnsureEventsExistsAsync( + eventList + .Select( + evt => evt.ToCDFEvent(Extractor, config.DataSet?.Id, nodeToAssetIds) + ) + .Where(evt => evt != null), + RetryMode.OnError, + SanitationMode.Clean, + token + ); log.LogResult(result, RequestType.CreateEvents, false, LogLevel.Debug); int skipped = 0; if (result.Errors != null) { - skipped = result.Errors.Aggregate(0, (seed, err) => - seed + (err.Skipped?.Count() ?? 0)); - - var fatalError = result.Errors.FirstOrDefault(err => err.Type == ErrorType.FatalFailure); + skipped = result.Errors.Aggregate( + 0, + (seed, err) => seed + (err.Skipped?.Count() ?? 0) + ); + + var fatalError = result.Errors.FirstOrDefault( + err => err.Type == ErrorType.FatalFailure + ); if (fatalError != null) { - log.LogError("Failed to push {NumFailedEvents} events to CDF: {Message}", count, fatalError.Exception?.Message); + log.LogError( + "Failed to push {NumFailedEvents} events to CDF: {Message}", + count, + fatalError.Exception?.Message + ); eventPushFailures.Inc(); - return fatalError.Exception is ResponseException rex && (rex.Code == 400 || rex.Code == 409); + return fatalError.Exception is ResponseException rex + && (rex.Code == 400 || rex.Code == 409); } } @@ -275,7 +377,12 @@ public CDFPusher( } catch (Exception exc) { - log.LogError(exc, "Failed to push {NumFailedEvents} events to CDF: {Message}", count, exc.Message); + log.LogError( + exc, + "Failed to push {NumFailedEvents} events to CDF: {Message}", + count, + exc.Message + ); eventPushFailures.Inc(); return exc is ResponseException rex && (rex.Code == 400 || rex.Code == 409); } @@ -295,7 +402,8 @@ public async Task PushNodes( IEnumerable variables, IEnumerable references, UpdateConfig update, - CancellationToken token) + CancellationToken token + ) { var result = new PushResult(); var report = new BrowseReport @@ -317,13 +425,22 @@ public async Task PushNodes( return result; } - log.LogInformation("Testing {TotalNodesToTest} nodes against CDF", variables.Count() + objects.Count()); + log.LogInformation( + "Testing {TotalNodesToTest} nodes against CDF", + variables.Count() + objects.Count() + ); if (fullConfig.DryRun) { if (fdmDestination != null) { - await fdmDestination.PushNodes(objects, variables, references, Extractor, token); + await fdmDestination.PushNodes( + objects, + variables, + references, + Extractor, + token + ); } return result; } @@ -342,71 +459,63 @@ public async Task PushNodes( return result; } - if (fdmDestination != null) + 
var assetsMap = MapAssets(objects); + var timeseriesMap = MapTimeseries(variables); + bool isTimeseriesPushed = true; + + if (pushCleanAssets) { - bool pushResult = true; - try - { - var tsIds = new ConcurrentDictionary( - variables.ToDictionary(ts => ts.GetUniqueId(Extractor))!); - await CreateTimeseries(tsIds, report, true, token); - } - catch (Exception ex) - { - log.LogError(ex, "Failed to push minimal timeseries to CDF"); - pushResult = false; - } - - if (pushResult) - { - try - { - pushResult = await fdmDestination.PushNodes(objects, variables, references, Extractor, token); - } - catch (Exception e) - { - log.LogError(e, "Failed to push to flexible data models"); - pushResult = false; - } - } - - result.Objects = pushResult; - result.Variables = pushResult; - result.References = pushResult; + await PushCleanAssets(assetsMap, update.Variables, report, token); } - else + + if (pushCleanTimeseries) { - try - { - await PushAssets(objects, update.Objects, report, token); - } - catch (Exception e) - { - log.LogError(e, "Failed to ensure assets"); - result.Objects = false; - } + isTimeseriesPushed = await PushCleanTimeseries( + timeseriesMap, + update.Variables, + report, + token + ); + } - try - { - await PushTimeseries(variables, update.Variables, report, token); - } - catch (Exception e) - { - log.LogError(e, "Failed to ensure timeseries"); - result.Variables = false; - } + var tasks = new List(); - try - { - await PushReferences(references, report, token); - } - catch (Exception e) - { - log.LogError(e, "Failed to ensure references"); - result.References = false; - } + if (isTimeseriesPushed && fdmDestination != null) + { + tasks.Add( + Task.Run( + () => + fdmDestination.PushNodes( + objects, + variables, + references, + Extractor, + token + ) + ) + ); + } + + if (!pushCleanAssets) + { + tasks.Add( + Task.Run(() => PushRawAssets(assetsMap, update.Variables, report, token)) + ); + } + + if (!pushCleanTimeseries) + { + tasks.Add( + Task.Run( + () => PushRawTimeseries(timeseriesMap, update.Variables, report, token) + ) + ); } + tasks.Add(Task.Run(() => PushReferences(references, report, token))); + + await Task.WhenAll(tasks); + log.LogInformation("Finish pushing nodes to CDF"); if (result.Objects && result.References && result.Variables) @@ -423,6 +532,81 @@ public async Task PushNodes( return result; } + + private ConcurrentDictionary MapAssets(IEnumerable objects) + { + return new ConcurrentDictionary( + objects + .Where(node => node.Source != NodeSource.CDF) + .ToDictionary(obj => Extractor.GetUniqueId(obj.Id)!) + ); + } + + private ConcurrentDictionary MapTimeseries( + IEnumerable variables + ) + { + return new ConcurrentDictionary( + variables.ToDictionary(ts => ts.GetUniqueId(Extractor)!) 
+ ); + } + + private async Task PushCleanAssets( + ConcurrentDictionary assetsMap, + TypeUpdateConfig update, + BrowseReport report, + CancellationToken token + ) + { + var assets = await CreateAssets(assetsMap, report, token); + + if (update.AnyUpdate) + { + await UpdateAssets(assetsMap, assets, update, report, token); + } + + return true; + } + + private async Task PushCleanTimeseries( + ConcurrentDictionary timeseriesMap, + TypeUpdateConfig update, + BrowseReport report, + CancellationToken token + ) + { + var timeseries = await CreateTimeseries( + timeseriesMap, + report, + config.SkipMetadata, + token + ); + + var toPushMeta = timeseriesMap + .Where(kvp => kvp.Value.Source != NodeSource.CDF) + .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); + + if (update.AnyUpdate && toPushMeta.Any()) + { + await UpdateTimeseries(toPushMeta, timeseries, update, report, token); + } + + return true; + } + + // { + // await PushAssets(objects, variables, update.Objects, report, token); + // await CreateTimeseriesMetadata(variables, report, token); + // } + + // private async Task CreateTimeseriesMetadata(IEnumerable variables, BrowseReport report, CancellationToken token) + // { + // var tsMap = new ConcurrentDictionary( + // variables.ToDictionary(ts => ts.GetUniqueId(Extractor))!); + // await CreateTimeseries(tsMap, report, true, token); + // } + + /// /// Reset the pusher, preparing it to be restarted /// @@ -431,6 +615,7 @@ public void Reset() missingTimeseries.Clear(); mismatchedTimeseries.Clear(); } + /// /// Initialize extracted datapoint ranges on the given list of states. /// @@ -441,9 +626,11 @@ public void Reset() public async Task InitExtractedRanges( IEnumerable states, bool backfillEnabled, - CancellationToken token) + CancellationToken token + ) { - if (!states.Any() || !config.ReadExtractedRanges || fullConfig.DryRun) return true; + if (!states.Any() || !config.ReadExtractedRanges || fullConfig.DryRun) + return true; var ids = new List(); foreach (var state in states) { @@ -452,7 +639,8 @@ public async Task InitExtractedRanges( for (int i = 0; i < state.ArrayDimensions[0]; i++) { var id = Extractor.GetUniqueId(state.SourceId, i); - if (id == null) break; + if (id == null) + break; ids.Add(id); } } @@ -466,7 +654,11 @@ public async Task InitExtractedRanges( Dictionary ranges; try { - var dict = await destination.GetExtractedRanges(ids.Select(Identity.Create).ToList(), token, backfillEnabled); + var dict = await destination.GetExtractedRanges( + ids.Select(Identity.Create).ToList(), + token, + backfillEnabled + ); ranges = dict.ToDictionary(kvp => kvp.Key.ExternalId, kvp => kvp.Value); } catch (Exception ex) @@ -482,7 +674,8 @@ public async Task InitExtractedRanges( for (int i = 0; i < state.ArrayDimensions[0]; i++) { var id = Extractor.GetUniqueId(state.SourceId, i); - if (id == null) break; + if (id == null) + break; if (ranges.TryGetValue(id, out var range)) { if (range == TimeRange.Empty) @@ -514,6 +707,7 @@ public async Task InitExtractedRanges( return true; } + /// /// Test that the extractor is capable of pushing to CDF. /// Also fetches DataSet externalId. @@ -522,7 +716,8 @@ public async Task InitExtractedRanges( /// True if pushing is possible, false if not. 
public async Task TestConnection(FullConfig fullConfig, CancellationToken token) { - if (fullConfig.DryRun) return true; + if (fullConfig.DryRun) + return true; try { @@ -530,20 +725,31 @@ public async Task InitExtractedRanges( } catch (Exception ex) { - log.LogError("Failed to get CDF login status, this is likely a problem with the network or configuration. Project {Project} at {Url}: {Message}", - config.Project, config.Host, ex.Message); + log.LogError( + "Failed to get CDF login status, this is likely a problem with the network or configuration. Project {Project} at {Url}: {Message}", + config.Project, + config.Host, + ex.Message + ); return false; } try { - await destination.CogniteClient.TimeSeries.ListAsync(new TimeSeriesQuery { Limit = 1 }, token); + await destination.CogniteClient.TimeSeries.ListAsync( + new TimeSeriesQuery { Limit = 1 }, + token + ); } catch (ResponseException ex) { - log.LogError("Could not access CDF Time Series - most likely due " + - "to insufficient access rights on API key. Project {Project} at {Host}: {Message}", - config.Project, config.Host, ex.Message); + log.LogError( + "Could not access CDF Time Series - most likely due " + + "to insufficient access rights on API key. Project {Project} at {Host}: {Message}", + config.Project, + config.Host, + ex.Message + ); return false; } @@ -551,13 +757,20 @@ public async Task InitExtractedRanges( { try { - await destination.CogniteClient.Events.ListAsync(new EventQuery { Limit = 1 }, token); + await destination.CogniteClient.Events.ListAsync( + new EventQuery { Limit = 1 }, + token + ); } catch (ResponseException ex) { - log.LogError("Could not access CDF Events, though event emitters are specified - most likely due " + - "to insufficient access rights on API key. Project {Project} at {Host}: {Message}", - config.Project, config.Host, ex.Message); + log.LogError( + "Could not access CDF Events, though event emitters are specified - most likely due " + + "to insufficient access rights on API key. Project {Project} at {Host}: {Message}", + config.Project, + config.Host, + ex.Message + ); return false; } } @@ -573,20 +786,27 @@ public async Task InitExtractedRanges( return true; } + /// /// Push list of references as relationships to CDF. 
/// /// List of references to push /// True if nothing failed unexpectedly - private async Task PushReferences(IEnumerable references, BrowseReport report, CancellationToken token) + private async Task PushReferences( + IEnumerable references, + BrowseReport report, + CancellationToken token + ) { - if (references == null || !references.Any()) return; + if (references == null || !references.Any()) + return; var relationships = references .Select(reference => reference.ToRelationship(config.DataSet?.Id, Extractor)) .DistinctBy(rel => rel.ExternalId); - bool useRawRelationships = config.RawMetadata != null + bool useRawRelationships = + config.RawMetadata != null && !string.IsNullOrWhiteSpace(config.RawMetadata.Database) && !string.IsNullOrWhiteSpace(config.RawMetadata.RelationshipsTable); @@ -598,7 +818,9 @@ private async Task PushReferences(IEnumerable references, BrowseRep } else { - var counts = await Task.WhenAll(relationships.ChunkBy(1000).Select(chunk => PushReferencesChunk(chunk, token))); + var counts = await Task.WhenAll( + relationships.ChunkBy(1000).Select(chunk => PushReferencesChunk(chunk, token)) + ); report.RelationshipsCreated += counts.Sum(); } @@ -607,7 +829,8 @@ private async Task PushReferences(IEnumerable references, BrowseRep public async Task ExecuteDeletes(DeletedNodes deletes, CancellationToken token) { - if (fullConfig.DryRun) return true; + if (fullConfig.DryRun) + return true; var tasks = new List(); if (deletes.Objects.Any()) @@ -641,97 +864,181 @@ public async Task ExecuteDeletes(DeletedNodes deletes, CancellationToken t /// Update list of nodes as assets in CDF Raw. /// /// Id, node map for the assets that should be pushed. - private async Task UpdateRawAssets(IDictionary assetMap, BrowseReport report, CancellationToken token) + private async Task UpdateRawAssets( + IDictionary assetMap, + BrowseReport report, + CancellationToken token + ) { - if (config.RawMetadata?.Database == null || config.RawMetadata?.AssetsTable == null) return; - await UpsertRawRows(config.RawMetadata.Database, config.RawMetadata.AssetsTable, rows => - { - if (rows == null) - { - return assetMap.Select(kvp => ( - kvp.Key, - update: PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, kvp.Value, null, ConverterType.Node) - )).Where(elem => elem.update != null) - .ToDictionary(pair => pair.Key, pair => pair.update!.Value); - } - - var toWrite = new List<(string key, RawRow> row, BaseUANode node)>(); - - foreach (var row in rows) + if (config.RawMetadata?.Database == null || config.RawMetadata?.AssetsTable == null) + return; + await UpsertRawRows( + config.RawMetadata.Database, + config.RawMetadata.AssetsTable, + rows => { - if (assetMap.TryGetValue(row.Key, out var ts)) + if (rows == null) { - toWrite.Add((row.Key, row, ts)); - assetMap.Remove(row.Key); + return assetMap + .Select( + kvp => + ( + kvp.Key, + update: PusherUtils.CreateRawUpdate( + log, + Extractor.StringConverter, + kvp.Value, + null, + ConverterType.Node + ) + ) + ) + .Where(elem => elem.update != null) + .ToDictionary(pair => pair.Key, pair => pair.update!.Value); } - } - var updates = new Dictionary(); + var toWrite = + new List<( + string key, + RawRow> row, + BaseUANode node + )>(); - foreach (var (key, row, node) in toWrite) - { - var update = PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, node, row, ConverterType.Node); - - if (update != null) + foreach (var row in rows) { - updates[key] = update.Value; - if (row == null) + if (assetMap.TryGetValue(row.Key, out var ts)) { - report.AssetsCreated++; 
+ toWrite.Add((row.Key, row, ts)); + assetMap.Remove(row.Key); } - else + } + + var updates = new Dictionary(); + + foreach (var (key, row, node) in toWrite) + { + var update = PusherUtils.CreateRawUpdate( + log, + Extractor.StringConverter, + node, + row, + ConverterType.Node + ); + + if (update != null) { - report.AssetsUpdated++; + updates[key] = update.Value; + if (row == null) + { + report.AssetsCreated++; + } + else + { + report.AssetsUpdated++; + } } } - } - return updates; - }, null, token); + return updates; + }, + null, + token + ); } + /// /// Create list of nodes as assets in CDF Raw. /// This does not create rows if they already exist. /// /// Id, node map for the assets that should be pushed. - private async Task CreateRawAssets(IDictionary assetMap, BrowseReport report, CancellationToken token) + private async Task CreateRawAssets( + IDictionary assetMap, + BrowseReport report, + CancellationToken token + ) { - if (config.RawMetadata?.Database == null || config.RawMetadata?.AssetsTable == null) return; + if (config.RawMetadata?.Database == null || config.RawMetadata?.AssetsTable == null) + return; - await EnsureRawRows(config.RawMetadata.Database, config.RawMetadata.AssetsTable, assetMap.Keys, ids => - { - var assets = ids.Select(id => (assetMap[id], id)); - var creates = assets.Select(pair => (pair.Item1.ToJson(log, Extractor.StringConverter, ConverterType.Node), pair.id)) - .Where(pair => pair.Item1 != null) - .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); - report.AssetsCreated += creates.Count; - return creates; - }, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, token); + await EnsureRawRows( + config.RawMetadata.Database, + config.RawMetadata.AssetsTable, + assetMap.Keys, + ids => + { + var assets = ids.Select(id => (assetMap[id], id)); + var creates = assets + .Select( + pair => + ( + pair.Item1.ToJson( + log, + Extractor.StringConverter, + ConverterType.Node + ), + pair.id + ) + ) + .Where(pair => pair.Item1 != null) + .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); + report.AssetsCreated += creates.Count; + return creates; + }, + new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, + token + ); } + /// /// Create assets in CDF Clean. /// /// Id, node map for the assets that should be pushed. 
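// The Raw asset paths above hinge on one distinction: CreateRawAssets only inserts rows
// whose keys are absent from the Raw table (EnsureRawRows later computes keys.Except(existing)),
// while UpdateRawAssets diffs every existing row and rewrites only the changed ones.
// A minimal, dependency-free sketch of the create-only selection; RowsToCreate and the
// generic node type are illustrative stand-ins, not types from this codebase:
using System.Collections.Generic;
using System.Linq;

static class RawCreateOnlySketch
{
    // existingKeys: row keys already present in the Raw table.
    // candidates: nodes to push, keyed by their CDF external id.
    public static Dictionary<string, TNode> RowsToCreate<TNode>(
        IEnumerable<string> existingKeys,
        IReadOnlyDictionary<string, TNode> candidates)
    {
        var existing = existingKeys.ToHashSet();
        return candidates
            .Where(kvp => !existing.Contains(kvp.Key))
            .ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
    }
}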
- private async Task> CreateAssets(IDictionary assetMap, BrowseReport report, CancellationToken token) + private async Task> CreateAssets( + IDictionary assetMap, + BrowseReport report, + CancellationToken token + ) { var assets = new List(); - foreach (var chunk in Chunking.ChunkByHierarchy(assetMap.Values, config.CdfChunking.Assets, node => node.Id, node => node.ParentId)) - { - var assetChunk = await destination.GetOrCreateAssetsAsync(chunk.Select(node => Extractor.GetUniqueId(node.Id)!), ids => - { - var assets = ids.Select(id => assetMap[id]); - var creates = assets - .Select(node => node.ToCDFAsset(fullConfig, Extractor, config.DataSet?.Id, config.MetadataMapping?.Assets)) - .Where(asset => asset != null); - report.AssetsCreated += creates.Count(); - return creates; - }, RetryMode.None, SanitationMode.Clean, token); + foreach ( + var chunk in Chunking.ChunkByHierarchy( + assetMap.Values, + config.CdfChunking.Assets, + node => node.Id, + node => node.ParentId + ) + ) + { + var assetChunk = await destination.GetOrCreateAssetsAsync( + chunk.Select(node => Extractor.GetUniqueId(node.Id)!), + ids => + { + var assets = ids.Select(id => assetMap[id]); + var creates = assets + .Select( + node => + node.ToCDFAsset( + fullConfig, + Extractor, + config.DataSet?.Id, + config.MetadataMapping?.Assets + ) + ) + .Where(asset => asset != null); + report.AssetsCreated += creates.Count(); + return creates; + }, + RetryMode.None, + SanitationMode.Clean, + token + ); log.LogResult(assetChunk, RequestType.CreateAssets, true); assetChunk.ThrowOnFatal(); - if (assetChunk.Results == null) continue; + if (assetChunk.Results == null) + continue; foreach (var asset in assetChunk.Results) { @@ -741,14 +1048,20 @@ private async Task> CreateAssets(IDictionary /// Update assets in CDF Clean. /// /// Id, node map for the assets that should be pushed. /// List of existing assets in CDF. /// Configuration for which fields should be updated. 
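// CreateAssets above leans on Chunking.ChunkByHierarchy so that a child asset never lands in
// an earlier request chunk than the parent it references by external id. The sketch below
// re-creates only the ordering idea (group nodes by depth, roots first); it is illustrative
// and not the algorithm used by the Cognite utils library:
using System;
using System.Collections.Generic;
using System.Linq;

static class HierarchyChunkSketch
{
    // Groups nodes by depth: roots first, then their children, and so on.
    // getParentId returns null for root nodes or parents outside the batch.
    public static IEnumerable<List<T>> ByDepth<T>(
        IEnumerable<T> nodes,
        Func<T, string> getId,
        Func<T, string?> getParentId)
    {
        var all = nodes.ToList();
        var byId = all.ToDictionary(getId);

        int Depth(T node)
        {
            int depth = 0;
            var parentId = getParentId(node);
            // Walk towards the root; stop at nodes outside the batch, and cap to guard against cycles.
            while (parentId != null && byId.TryGetValue(parentId, out var parent) && depth < all.Count)
            {
                depth++;
                parentId = getParentId(parent);
            }
            return depth;
        }

        return all.GroupBy(Depth).OrderBy(group => group.Key).Select(group => group.ToList());
    }
}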
- private async Task UpdateAssets(IDictionary assetMap, IEnumerable assets, - TypeUpdateConfig update, BrowseReport report, CancellationToken token) + private async Task UpdateAssets( + IDictionary assetMap, + IEnumerable assets, + TypeUpdateConfig update, + BrowseReport report, + CancellationToken token + ) { var updates = new List(); var existing = assets.ToDictionary(asset => asset.ExternalId); @@ -756,11 +1069,22 @@ private async Task UpdateAssets(IDictionary assetMap, IEnume { if (existing.TryGetValue(kvp.Key, out var asset)) { - var assetUpdate = PusherUtils.GetAssetUpdate(fullConfig, asset, kvp.Value, Extractor, update); - - if (assetUpdate == null) continue; - if (assetUpdate.ParentExternalId != null || assetUpdate.Description != null - || assetUpdate.Name != null || assetUpdate.Metadata != null) + var assetUpdate = PusherUtils.GetAssetUpdate( + fullConfig, + asset, + kvp.Value, + Extractor, + update + ); + + if (assetUpdate == null) + continue; + if ( + assetUpdate.ParentExternalId != null + || assetUpdate.Description != null + || assetUpdate.Name != null + || assetUpdate.Metadata != null + ) { updates.Add(new AssetUpdateItem(asset.ExternalId) { Update = assetUpdate }); } @@ -768,7 +1092,12 @@ private async Task UpdateAssets(IDictionary assetMap, IEnume } if (updates.Any()) { - var res = await destination.UpdateAssetsAsync(updates, RetryMode.OnError, SanitationMode.Clean, token); + var res = await destination.UpdateAssetsAsync( + updates, + RetryMode.OnError, + SanitationMode.Clean, + token + ); log.LogResult(res, RequestType.UpdateAssets, false); @@ -777,27 +1106,28 @@ private async Task UpdateAssets(IDictionary assetMap, IEnume report.AssetsUpdated += res.Results?.Count() ?? 0; } } + /// - /// Master method for pushing assets to CDF raw or clean. + /// Master method for pushing assets to CDF raw. 
/// /// Assets to push /// Configuration for which fields, if any, to update in CDF - private async Task PushAssets( - IEnumerable objects, + private async Task PushRawAssets( + ConcurrentDictionary assetsMap, TypeUpdateConfig update, BrowseReport report, - CancellationToken token) + CancellationToken token + ) { - if (config.SkipMetadata) return; + if (config.SkipMetadata) + return; - var assetIds = new ConcurrentDictionary(objects - .Where(node => node.Source != NodeSource.CDF) - .ToDictionary(obj => Extractor.GetUniqueId(obj.Id)!)); - - if (!assetIds.Any()) return; + if (!assetsMap.Any()) + return; var metaMap = config.MetadataMapping?.Assets; - bool useRawAssets = config.RawMetadata != null + bool useRawAssets = + config.RawMetadata != null && !string.IsNullOrWhiteSpace(config.RawMetadata.Database) && !string.IsNullOrWhiteSpace(config.RawMetadata.AssetsTable); @@ -805,49 +1135,58 @@ private async Task PushAssets( { if (update.AnyUpdate) { - await UpdateRawAssets(assetIds, report, token); + await UpdateRawAssets(assetsMap, report, token); } else { - await CreateRawAssets(assetIds, report, token); - } - } - else - { - var assets = await CreateAssets(assetIds, report, token); - - if (update.AnyUpdate) - { - await UpdateAssets(assetIds, assets, update, report, token); + await CreateRawAssets(assetsMap, report, token); } } } private async Task MarkAssetsAsDeleted( IEnumerable externalIds, - CancellationToken token) + CancellationToken token + ) { - bool useRawAssets = config.RawMetadata != null + bool useRawAssets = + config.RawMetadata != null && !string.IsNullOrWhiteSpace(config.RawMetadata.Database) && !string.IsNullOrWhiteSpace(config.RawMetadata.AssetsTable); if (useRawAssets) { - await MarkRawRowsAsDeleted(config.RawMetadata!.Database!, config.RawMetadata!.AssetsTable!, externalIds, token); + await MarkRawRowsAsDeleted( + config.RawMetadata!.Database!, + config.RawMetadata!.AssetsTable!, + externalIds, + token + ); } else { - var updates = externalIds.Select(extId => new AssetUpdateItem(extId) - { - Update = new AssetUpdate - { - Metadata = new UpdateDictionary(new Dictionary + var updates = externalIds.Select( + extId => + new AssetUpdateItem(extId) { - { fullConfig.Extraction.Deletes.DeleteMarker, "true" } - }, Enumerable.Empty()) - } - }); - var result = await destination.UpdateAssetsAsync(updates, RetryMode.OnError, SanitationMode.Clean, token); + Update = new AssetUpdate + { + Metadata = new UpdateDictionary( + new Dictionary + { + { fullConfig.Extraction.Deletes.DeleteMarker, "true" } + }, + Enumerable.Empty() + ) + } + } + ); + var result = await destination.UpdateAssetsAsync( + updates, + RetryMode.OnError, + SanitationMode.Clean, + token + ); log.LogResult(result, RequestType.UpdateAssets, true); result.ThrowOnFatal(); } @@ -863,55 +1202,86 @@ private async Task MarkAssetsAsDeleted( private async Task UpdateRawTimeseries( IDictionary tsMap, BrowseReport report, - CancellationToken token) + CancellationToken token + ) { - if (config.RawMetadata?.Database == null || config.RawMetadata.TimeseriesTable == null) return; - - await UpsertRawRows(config.RawMetadata.Database, config.RawMetadata.TimeseriesTable, rows => - { - if (rows == null) - { - return tsMap.Select(kvp => ( - kvp.Key, - update: PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, kvp.Value, null, ConverterType.Variable) - )).Where(elem => elem.update != null) - .ToDictionary(pair => pair.Key, pair => pair.update!.Value); - } - - var toWrite = new List<(string key, RawRow> row, UAVariable node)>(); + if 
(config.RawMetadata?.Database == null || config.RawMetadata.TimeseriesTable == null) + return; - foreach (var row in rows) + await UpsertRawRows( + config.RawMetadata.Database, + config.RawMetadata.TimeseriesTable, + rows => { - if (tsMap.TryGetValue(row.Key, out var ts)) + if (rows == null) { - toWrite.Add((row.Key, row, ts)); - tsMap.Remove(row.Key); + return tsMap + .Select( + kvp => + ( + kvp.Key, + update: PusherUtils.CreateRawUpdate( + log, + Extractor.StringConverter, + kvp.Value, + null, + ConverterType.Variable + ) + ) + ) + .Where(elem => elem.update != null) + .ToDictionary(pair => pair.Key, pair => pair.update!.Value); } - } - var updates = new Dictionary(); + var toWrite = + new List<( + string key, + RawRow> row, + UAVariable node + )>(); - foreach (var (key, row, node) in toWrite) - { - var update = PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, node, row, ConverterType.Variable); - - if (update != null) + foreach (var row in rows) { - updates[key] = update.Value; - if (row == null) + if (tsMap.TryGetValue(row.Key, out var ts)) { - report.TimeSeriesCreated++; + toWrite.Add((row.Key, row, ts)); + tsMap.Remove(row.Key); } - else + } + + var updates = new Dictionary(); + + foreach (var (key, row, node) in toWrite) + { + var update = PusherUtils.CreateRawUpdate( + log, + Extractor.StringConverter, + node, + row, + ConverterType.Variable + ); + + if (update != null) { - report.TimeSeriesUpdated++; + updates[key] = update.Value; + if (row == null) + { + report.TimeSeriesCreated++; + } + else + { + report.TimeSeriesUpdated++; + } } } - } - return updates; - }, null, token); + return updates; + }, + null, + token + ); } + /// /// Create list of nodes as timeseries in CDF Raw. /// This does not create rows if they already exist. @@ -920,21 +1290,42 @@ await UpsertRawRows(config.RawMetadata.Database, config.RawMetadata private async Task CreateRawTimeseries( IDictionary tsMap, BrowseReport report, - CancellationToken token) + CancellationToken token + ) { - if (config.RawMetadata?.Database == null || config.RawMetadata.TimeseriesTable == null) return; + if (config.RawMetadata?.Database == null || config.RawMetadata.TimeseriesTable == null) + return; - await EnsureRawRows(config.RawMetadata.Database, config.RawMetadata.TimeseriesTable, tsMap.Keys, ids => - { - var timeseries = ids.Select(id => (tsMap[id], id)); - var creates = timeseries.Select(pair => (pair.Item1.ToJson(log, Extractor.StringConverter, ConverterType.Variable), pair.id)) - .Where(pair => pair.Item1 != null) - .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); - - report.TimeSeriesCreated += creates.Count; - return creates; - }, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, token); + await EnsureRawRows( + config.RawMetadata.Database, + config.RawMetadata.TimeseriesTable, + tsMap.Keys, + ids => + { + var timeseries = ids.Select(id => (tsMap[id], id)); + var creates = timeseries + .Select( + pair => + ( + pair.Item1.ToJson( + log, + Extractor.StringConverter, + ConverterType.Variable + ), + pair.id + ) + ) + .Where(pair => pair.Item1 != null) + .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); + + report.TimeSeriesCreated += creates.Count; + return creates; + }, + new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, + token + ); } + /// /// Create timeseries in CDF Clean, optionally creates only minimal timeseries with no metadata or context. 
/// @@ -944,36 +1335,48 @@ private async Task> CreateTimeseries( IDictionary tsMap, BrowseReport report, bool createMinimalTimeseries, - CancellationToken token) + CancellationToken token + ) { - var timeseries = await destination.GetOrCreateTimeSeriesAsync(tsMap.Keys, ids => - { - var tss = ids.Select(id => tsMap[id]); - var creates = tss.Select(ts => ts.ToTimeseries( - fullConfig, - Extractor, - Extractor, - config.DataSet?.Id, - nodeToAssetIds, - config.MetadataMapping?.Timeseries, - createMinimalTimeseries)) - .Where(ts => ts != null); - if (createMinimalTimeseries) - { - report.MinimalTimeSeriesCreated += creates.Count(); - } - else + var timeseries = await destination.GetOrCreateTimeSeriesAsync( + tsMap.Keys, + ids => { - report.TimeSeriesCreated += creates.Count(); - } - return creates; - }, RetryMode.None, SanitationMode.Clean, token); + var tss = ids.Select(id => tsMap[id]); + var creates = tss.Select( + ts => + ts.ToTimeseries( + fullConfig, + Extractor, + Extractor, + config.DataSet?.Id, + nodeToAssetIds, + config.MetadataMapping?.Timeseries, + createMinimalTimeseries + ) + ) + .Where(ts => ts != null); + if (createMinimalTimeseries) + { + report.MinimalTimeSeriesCreated += creates.Count(); + } + else + { + report.TimeSeriesCreated += creates.Count(); + } + return creates; + }, + RetryMode.None, + SanitationMode.Clean, + token + ); log.LogResult(timeseries, RequestType.CreateTimeSeries, true); timeseries.ThrowOnFatal(); - if (timeseries.Results == null) return Array.Empty(); + if (timeseries.Results == null) + return Array.Empty(); var foundBadTimeseries = new List(); foreach (var ts in timeseries.Results) @@ -991,11 +1394,15 @@ private async Task> CreateTimeseries( } if (foundBadTimeseries.Any()) { - log.LogDebug("Found mismatched timeseries when ensuring: {TimeSeries}", string.Join(", ", foundBadTimeseries)); + log.LogDebug( + "Found mismatched timeseries when ensuring: {TimeSeries}", + string.Join(", ", foundBadTimeseries) + ); } return timeseries.Results; } + /// /// Update timeseries in CDF Clean. 
/// @@ -1007,7 +1414,8 @@ private async Task UpdateTimeseries( IEnumerable timeseries, TypeUpdateConfig update, BrowseReport report, - CancellationToken token) + CancellationToken token + ) { var updates = new List(); var existing = timeseries.ToDictionary(asset => asset.ExternalId); @@ -1015,10 +1423,22 @@ private async Task UpdateTimeseries( { if (existing.TryGetValue(kvp.Key, out var ts)) { - var tsUpdate = PusherUtils.GetTSUpdate(fullConfig, Extractor, ts, kvp.Value, update, nodeToAssetIds); - if (tsUpdate == null) continue; - if (tsUpdate.AssetId != null || tsUpdate.Description != null - || tsUpdate.Name != null || tsUpdate.Metadata != null) + var tsUpdate = PusherUtils.GetTSUpdate( + fullConfig, + Extractor, + ts, + kvp.Value, + update, + nodeToAssetIds + ); + if (tsUpdate == null) + continue; + if ( + tsUpdate.AssetId != null + || tsUpdate.Description != null + || tsUpdate.Name != null + || tsUpdate.Metadata != null + ) { updates.Add(new TimeSeriesUpdateItem(ts.ExternalId) { Update = tsUpdate }); } @@ -1027,7 +1447,12 @@ private async Task UpdateTimeseries( if (updates.Any()) { - var res = await destination.UpdateTimeSeriesAsync(updates, RetryMode.OnError, SanitationMode.Clean, token); + var res = await destination.UpdateTimeSeriesAsync( + updates, + RetryMode.OnError, + SanitationMode.Clean, + token + ); log.LogResult(res, RequestType.UpdateTimeSeries, false); res.ThrowOnFatal(); @@ -1041,66 +1466,69 @@ private async Task UpdateTimeseries( /// /// Timeseries to push /// Configuration for which fields, if any, to update in CDF - private async Task PushTimeseries( - IEnumerable tsList, + private async Task PushRawTimeseries( + ConcurrentDictionary tsIds, TypeUpdateConfig update, BrowseReport report, - CancellationToken token) + CancellationToken token + ) { - var tsIds = new ConcurrentDictionary( - tsList.ToDictionary(ts => ts.GetUniqueId(Extractor)!)); - bool useRawTimeseries = config.RawMetadata != null - && !string.IsNullOrWhiteSpace(config.RawMetadata.Database) - && !string.IsNullOrWhiteSpace(config.RawMetadata.TimeseriesTable); - - bool simpleTimeseries = useRawTimeseries || config.SkipMetadata; - - var timeseries = await CreateTimeseries(tsIds, report, simpleTimeseries, token); - - var toPushMeta = tsIds.Where(kvp => kvp.Value.Source != NodeSource.CDF) + var toPushMeta = tsIds + .Where(kvp => kvp.Value.Source != NodeSource.CDF) .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); - if (config.SkipMetadata || !toPushMeta.Any()) return; - - if (useRawTimeseries) + if (update.AnyUpdate) { - if (update.AnyUpdate) - { - await UpdateRawTimeseries(toPushMeta, report, token); - } - else - { - await CreateRawTimeseries(toPushMeta, report, token); - } + await UpdateRawTimeseries(toPushMeta, report, token); } - else if (update.AnyUpdate) + else { - await UpdateTimeseries(toPushMeta, timeseries, update, report, token); + await CreateRawTimeseries(toPushMeta, report, token); } } - private async Task MarkTimeSeriesAsDeleted(IEnumerable externalIds, CancellationToken token) + private async Task MarkTimeSeriesAsDeleted( + IEnumerable externalIds, + CancellationToken token + ) { - bool useRawTss = config.RawMetadata != null + bool useRawTss = + config.RawMetadata != null && !string.IsNullOrWhiteSpace(config.RawMetadata.Database) && !string.IsNullOrWhiteSpace(config.RawMetadata.TimeseriesTable); if (useRawTss) { - await MarkRawRowsAsDeleted(config.RawMetadata!.Database!, config.RawMetadata!.TimeseriesTable!, externalIds, token); + await MarkRawRowsAsDeleted( + config.RawMetadata!.Database!, + 
config.RawMetadata!.TimeseriesTable!, + externalIds, + token + ); } - var updates = externalIds.Select(extId => new TimeSeriesUpdateItem(extId) - { - Update = new TimeSeriesUpdate - { - Metadata = new UpdateDictionary(new Dictionary + var updates = externalIds.Select( + extId => + new TimeSeriesUpdateItem(extId) { - { fullConfig.Extraction.Deletes.DeleteMarker, "true" } - }, Enumerable.Empty()) - } - }); - var result = await destination.UpdateTimeSeriesAsync(updates, RetryMode.OnError, SanitationMode.Clean, token); + Update = new TimeSeriesUpdate + { + Metadata = new UpdateDictionary( + new Dictionary + { + { fullConfig.Extraction.Deletes.DeleteMarker, "true" } + }, + Enumerable.Empty() + ) + } + } + ); + var result = await destination.UpdateTimeSeriesAsync( + updates, + RetryMode.OnError, + SanitationMode.Clean, + token + ); log.LogResult(result, RequestType.UpdateAssets, true); result.ThrowOnFatal(); } @@ -1124,19 +1552,22 @@ private async Task EnsureRawRows( IEnumerable keys, Func, IDictionary> dtoBuilder, JsonSerializerOptions options, - CancellationToken token) + CancellationToken token + ) { var rows = await GetRawRows(dbName, tableName, new[] { "," }, token); var existing = rows.Select(row => row.Key); var toCreate = keys.Except(existing); - if (!toCreate.Any()) return; + if (!toCreate.Any()) + return; log.LogInformation("Creating {Count} raw rows in CDF", toCreate.Count()); var createDtos = dtoBuilder(toCreate); await destination.InsertRawRowsAsync(dbName, tableName, createDtos, options, token); } + /// /// Insert or update raw rows given by in table /// given by and . @@ -1150,9 +1581,13 @@ private async Task EnsureRawRows( private async Task UpsertRawRows( string dbName, string tableName, - Func>>?, IDictionary> dtoBuilder, + Func< + IEnumerable>>?, + IDictionary + > dtoBuilder, JsonSerializerOptions? options, - CancellationToken token) + CancellationToken token + ) { int count = 0; async Task CallAndCreate(IEnumerable>>? rows) @@ -1167,8 +1602,15 @@ async Task CallAndCreate(IEnumerable>>? r { try { - var result = await destination.CogniteClient.Raw.ListRowsAsync>(dbName, tableName, - new RawRowQuery { Cursor = cursor, Limit = 10_000 }, null, token); + var result = await destination.CogniteClient.Raw.ListRowsAsync< + Dictionary + >( + dbName, + tableName, + new RawRowQuery { Cursor = cursor, Limit = 10_000 }, + null, + token + ); cursor = result.NextCursor; await CallAndCreate(result.Items); @@ -1189,7 +1631,8 @@ public async Task>>> GetRawRo string dbName, string tableName, IEnumerable? columns, - CancellationToken token) + CancellationToken token + ) { string? 
cursor = null; var rows = new List>>(); @@ -1197,8 +1640,20 @@ public async Task>>> GetRawRo { try { - var result = await destination.CogniteClient.Raw.ListRowsAsync>(dbName, tableName, - new RawRowQuery { Cursor = cursor, Limit = 10_000, Columns = columns }, null, token); + var result = await destination.CogniteClient.Raw.ListRowsAsync< + Dictionary + >( + dbName, + tableName, + new RawRowQuery + { + Cursor = cursor, + Limit = 10_000, + Columns = columns + }, + null, + token + ); rows.AddRange(result.Items); cursor = result.NextCursor; } @@ -1211,7 +1666,12 @@ public async Task>>> GetRawRo return rows; } - private async Task MarkRawRowsAsDeleted(string dbName, string tableName, IEnumerable keys, CancellationToken token) + private async Task MarkRawRowsAsDeleted( + string dbName, + string tableName, + IEnumerable keys, + CancellationToken token + ) { var keySet = new HashSet(keys); var rows = await GetRawRows(dbName, tableName, keys, token); @@ -1221,7 +1681,12 @@ private async Task MarkRawRowsAsDeleted(string dbName, string tableName, IEnumer { row.Columns[fullConfig.Extraction.Deletes.DeleteMarker] = trueElem; } - await destination.InsertRawRowsAsync(dbName, tableName, toMark.ToDictionary(e => e.Key, e => e.Columns), token); + await destination.InsertRawRowsAsync( + dbName, + tableName, + toMark.ToDictionary(e => e.Key, e => e.Columns), + token + ); } #endregion @@ -1231,9 +1696,13 @@ private async Task MarkRawRowsAsDeleted(string dbName, string tableName, IEnumer /// Create the given list of relationships in CDF, handles duplicates. /// /// Relationships to create - private async Task PushReferencesChunk(IEnumerable relationships, CancellationToken token) + private async Task PushReferencesChunk( + IEnumerable relationships, + CancellationToken token + ) { - if (!relationships.Any()) return 0; + if (!relationships.Any()) + return 0; try { await destination.CogniteClient.Relationships.CreateAsync(relationships, token); @@ -1254,9 +1723,12 @@ private async Task PushReferencesChunk(IEnumerable rela } } } - if (!existing.Any()) throw; + if (!existing.Any()) + throw; - relationships = relationships.Where(rel => !existing.Contains(rel.ExternalId)).ToList(); + relationships = relationships + .Where(rel => !existing.Contains(rel.ExternalId)) + .ToList(); return await PushReferencesChunk(relationships, token); } else @@ -1265,13 +1737,22 @@ private async Task PushReferencesChunk(IEnumerable rela } } } + /// /// Create the given list of relationships in CDF Raw, skips rows that already exist. /// /// Relationships to create. 
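// PushReferencesChunk above treats duplicate errors as recoverable: it strips the external ids
// reported as already existing out of the batch and retries with the remainder. A dependency-free
// sketch of that retry shape; the delegate and the DuplicateException type below are illustrative
// stand-ins, not CogniteSdk types:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

static class DuplicateRetrySketch
{
    public sealed class DuplicateException : Exception
    {
        public IReadOnlyCollection<string> DuplicateIds { get; }
        public DuplicateException(IReadOnlyCollection<string> ids) => DuplicateIds = ids;
    }

    // create: writes the batch, throwing DuplicateException when some ids already exist.
    // Returns the number of items actually created.
    public static async Task<int> CreateIgnoringDuplicates(
        IReadOnlyCollection<string> ids,
        Func<IReadOnlyCollection<string>, Task> create)
    {
        if (ids.Count == 0) return 0;
        try
        {
            await create(ids);
            return ids.Count;
        }
        catch (DuplicateException ex) when (ex.DuplicateIds.Count > 0)
        {
            var remaining = ids.Except(ex.DuplicateIds).ToList();
            return await CreateIgnoringDuplicates(remaining, create);
        }
    }
}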
- private async Task PushRawReferences(IEnumerable relationships, BrowseReport report, CancellationToken token) + private async Task PushRawReferences( + IEnumerable relationships, + BrowseReport report, + CancellationToken token + ) { - if (config.RawMetadata?.Database == null || config.RawMetadata.RelationshipsTable == null) return; + if ( + config.RawMetadata?.Database == null + || config.RawMetadata.RelationshipsTable == null + ) + return; await EnsureRawRows( config.RawMetadata.Database, @@ -1280,30 +1761,46 @@ await EnsureRawRows( ids => { var idSet = ids.ToHashSet(); - var creates = relationships.Where(rel => idSet.Contains(rel.ExternalId)).ToDictionary(rel => rel.ExternalId); + var creates = relationships + .Where(rel => idSet.Contains(rel.ExternalId)) + .ToDictionary(rel => rel.ExternalId); report.RelationshipsCreated += creates.Count; return creates; }, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, - token); + token + ); } - private async Task MarkReferencesAsDeleted(IEnumerable externalIds, CancellationToken token) + private async Task MarkReferencesAsDeleted( + IEnumerable externalIds, + CancellationToken token + ) { - bool useRawRelationships = config.RawMetadata != null + bool useRawRelationships = + config.RawMetadata != null && !string.IsNullOrWhiteSpace(config.RawMetadata.Database) && !string.IsNullOrWhiteSpace(config.RawMetadata.RelationshipsTable); if (useRawRelationships) { - await MarkRawRowsAsDeleted(config.RawMetadata!.Database!, config.RawMetadata!.RelationshipsTable!, externalIds, token); + await MarkRawRowsAsDeleted( + config.RawMetadata!.Database!, + config.RawMetadata!.RelationshipsTable!, + externalIds, + token + ); } else if (config.DeleteRelationships) { - var tasks = externalIds.ChunkBy(1000).Select(chunk => destination.CogniteClient.Relationships.DeleteAsync(chunk, true, token)); + var tasks = externalIds + .ChunkBy(1000) + .Select( + chunk => + destination.CogniteClient.Relationships.DeleteAsync(chunk, true, token) + ); await Task.WhenAll(tasks); } - } #endregion @@ -1321,9 +1818,14 @@ private async Task EnsureConfigInit(CancellationToken token) } catch (ResponseException ex) { - log.LogError("Could not fetch data set by external id. It may not exist, or the user may lack" + - " sufficient access rights. Project {Project} at {Host}, id {Id}: {Message}", - config.Project, config.Host, config.DataSet.ExternalId, ex.Message); + log.LogError( + "Could not fetch data set by external id. It may not exist, or the user may lack" + + " sufficient access rights. 
Project {Project} at {Host}, id {Id}: {Message}", + config.Project, + config.Host, + config.DataSet.ExternalId, + ex.Message + ); throw; } } From b450ec524981fded72c9d2437c3ca4120df7dcfb Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 27 Jun 2023 09:02:22 +0200 Subject: [PATCH 02/26] latest --- .config/dotnet-tools.json | 10 +- Extractor/Pushers/CDFPusher.cs | 644 ++++++++++++----------------- MQTTCDFBridge/MQTTCDFBridge.csproj | 2 +- Test/Test.csproj | 2 +- Test/Unit/DeleteTest.cs | 4 + 5 files changed, 278 insertions(+), 384 deletions(-) diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json index 49149206..ec7d3358 100644 --- a/.config/dotnet-tools.json +++ b/.config/dotnet-tools.json @@ -3,16 +3,10 @@ "isRoot": true, "tools": { "microsoft.sbom.dotnettool": { - "version": "1.1.5", + "version": "1.1.6", "commands": [ "sbom-tool" ] - }, - "paket": { - "version": "7.2.1", - "commands": [ - "paket" - ] } } -} \ No newline at end of file +} diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index 4dbd2f51..45c0b79c 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -57,21 +57,13 @@ public sealed class CDFPusher : IPusher public PusherInput? PendingNodes { get; set; } private UAExtractor extractor; - private bool pushCleanAssets; - private bool pushCleanTimeseries; - - public UAExtractor Extractor - { - get => extractor; - set + public UAExtractor Extractor { get => extractor; set { + extractor = value; + if (fdmDestination != null) { - extractor = value; - if (fdmDestination != null) - { - fdmDestination.Extractor = value; - } + fdmDestination.Extractor = value; } - } + } } public IPusherConfig BaseConfig { get; } private readonly HashSet mismatchedTimeseries = new HashSet(); @@ -80,14 +72,20 @@ public UAExtractor Extractor private readonly BrowseCallback? callback; private readonly FDMWriter? 
fdmDestination; + private bool pushCleanAssets => + string.IsNullOrWhiteSpace(config.RawMetadata?.Database) + && string.IsNullOrWhiteSpace(config.RawMetadata?.AssetsTable); + private bool pushCleanTimeseries => + string.IsNullOrWhiteSpace(config.RawMetadata?.Database) + && string.IsNullOrWhiteSpace(config.RawMetadata?.TimeseriesTable); + public CDFPusher( ILogger log, FullConfig fullConfig, CognitePusherConfig config, CogniteDestination destination, - IServiceProvider provider - ) + IServiceProvider provider) { extractor = null!; this.log = log; @@ -95,77 +93,40 @@ IServiceProvider provider BaseConfig = config; this.destination = destination; this.fullConfig = fullConfig; - if ( - config.BrowseCallback != null - && ( - config.BrowseCallback.Id.HasValue - || !string.IsNullOrEmpty(config.BrowseCallback.ExternalId) - ) - ) + if (config.BrowseCallback != null && (config.BrowseCallback.Id.HasValue || !string.IsNullOrEmpty(config.BrowseCallback.ExternalId))) { callback = new BrowseCallback(destination, config.BrowseCallback, log); } if (config.FlexibleDataModels != null && config.FlexibleDataModels.Enabled) { - fdmDestination = new FDMWriter( - provider.GetRequiredService(), - destination, - provider.GetRequiredService>() - ); + fdmDestination = new FDMWriter(provider.GetRequiredService(), destination, + provider.GetRequiredService>()); } - - pushCleanAssets = - string.IsNullOrWhiteSpace(config.RawMetadata?.Database) - && string.IsNullOrWhiteSpace(config.RawMetadata?.AssetsTable); - - pushCleanTimeseries = - string.IsNullOrWhiteSpace(config.RawMetadata?.Database) - && string.IsNullOrWhiteSpace(config.RawMetadata?.TimeseriesTable); } - private static readonly Counter dataPointsCounter = Metrics.CreateCounter( - "opcua_datapoints_pushed_cdf", - "Number of datapoints pushed to CDF" - ); - private static readonly Counter dataPointPushes = Metrics.CreateCounter( - "opcua_datapoint_pushes_cdf", - "Number of times datapoints have been pushed to CDF" - ); - private static readonly Counter dataPointPushFailures = Metrics.CreateCounter( - "opcua_datapoint_push_failures_cdf", - "Number of completely failed pushes of datapoints to CDF" - ); - private static readonly Counter eventCounter = Metrics.CreateCounter( - "opcua_events_pushed_cdf", - "Number of events pushed to CDF" - ); - private static readonly Counter eventPushCounter = Metrics.CreateCounter( - "opcua_event_pushes_cdf", - "Number of times events have been pushed to CDF" - ); - private static readonly Counter eventPushFailures = Metrics.CreateCounter( - "opcua_event_push_failures_cdf", - "Number of times events have been pushed to CDF" - ); - private static readonly Counter nodeEnsuringFailures = Metrics.CreateCounter( - "opcua_node_ensure_failures_cdf", - "Number of completely failed requests to CDF when ensuring assets/timeseries exist" - ); - private static readonly Counter skippedEvents = Metrics.CreateCounter( - "opcua_skipped_events_cdf", - "Number of events skipped by CDF pusher" - ); - private static readonly Gauge missingTimeseriesCnt = Metrics.CreateGauge( - "opcua_missing_timeseries", - "Number of distinct timeseries that have been found to be missing in CDF" - ); - private static readonly Gauge mismatchedTimeseriesCnt = Metrics.CreateGauge( - "opcua_mismatched_timeseries", - "Number of distinct timeseries that have been found to have different types in OPC-UA and in CDF" - ); + private static readonly Counter dataPointsCounter = Metrics + .CreateCounter("opcua_datapoints_pushed_cdf", "Number of datapoints pushed to CDF"); + private 
static readonly Counter dataPointPushes = Metrics + .CreateCounter("opcua_datapoint_pushes_cdf", "Number of times datapoints have been pushed to CDF"); + private static readonly Counter dataPointPushFailures = Metrics + .CreateCounter("opcua_datapoint_push_failures_cdf", "Number of completely failed pushes of datapoints to CDF"); + private static readonly Counter eventCounter = Metrics + .CreateCounter("opcua_events_pushed_cdf", "Number of events pushed to CDF"); + private static readonly Counter eventPushCounter = Metrics + .CreateCounter("opcua_event_pushes_cdf", "Number of times events have been pushed to CDF"); + private static readonly Counter eventPushFailures = Metrics + .CreateCounter("opcua_event_push_failures_cdf", "Number of times events have been pushed to CDF"); + private static readonly Counter nodeEnsuringFailures = Metrics + .CreateCounter("opcua_node_ensure_failures_cdf", + "Number of completely failed requests to CDF when ensuring assets/timeseries exist"); + private static readonly Counter skippedEvents = Metrics + .CreateCounter("opcua_skipped_events_cdf", "Number of events skipped by CDF pusher"); + private static readonly Gauge missingTimeseriesCnt = Metrics + .CreateGauge("opcua_missing_timeseries", "Number of distinct timeseries that have been found to be missing in CDF"); + private static readonly Gauge mismatchedTimeseriesCnt = Metrics + .CreateGauge("opcua_mismatched_timeseries", "Number of distinct timeseries that have been found to have different types in OPC-UA and in CDF"); private readonly ILogger log; - #region Interface @@ -173,72 +134,45 @@ IServiceProvider provider /// Attempts to push the given list of datapoints to CDF. /// ' /// True if push succeeded, false if it failed, null if there were no points to push. - public async Task PushDataPoints( - IEnumerable points, - CancellationToken token - ) + public async Task PushDataPoints(IEnumerable points, CancellationToken token) { - if (points == null) - return null; + if (points == null) return null; Dictionary> dataPointList = points .Where(dp => dp.Timestamp > DateTime.UnixEpoch) .GroupBy(dp => dp.Id) - .Where( - group => - !mismatchedTimeseries.Contains(group.Key) - && !missingTimeseries.Contains(group.Key) - ) + .Where(group => !mismatchedTimeseries.Contains(group.Key) + && !missingTimeseries.Contains(group.Key)) .ToDictionary(group => group.Key, group => group.ToList()); int count = dataPointList.Aggregate(0, (seed, points) => seed + points.Value.Count); - if (count == 0) - return null; + if (count == 0) return null; - var inserts = dataPointList.ToDictionary( - kvp => Identity.Create(kvp.Key), - kvp => - kvp.Value.Select( - dp => - dp.IsString - ? new Datapoint(dp.Timestamp, dp.StringValue) - : new Datapoint(dp.Timestamp, dp.DoubleValue.Value) - ) - ); + var inserts = dataPointList.ToDictionary(kvp => + Identity.Create(kvp.Key), + kvp => kvp.Value.Select( + dp => dp.IsString ? new Datapoint(dp.Timestamp, dp.StringValue) : new Datapoint(dp.Timestamp, dp.DoubleValue.Value)) + ); if (fullConfig.DryRun) { - log.LogInformation( - "Dry run enabled. Would insert {Count} datapoints over {C2} timeseries to CDF", - count, - inserts.Count - ); + log.LogInformation("Dry run enabled. 
Would insert {Count} datapoints over {C2} timeseries to CDF", count, inserts.Count); return null; } try { - var result = await destination.InsertDataPointsAsync( - inserts, - SanitationMode.Clean, - RetryMode.OnError, - token - ); + var result = await destination.InsertDataPointsAsync(inserts, SanitationMode.Clean, RetryMode.OnError, token); int realCount = count; log.LogResult(result, RequestType.CreateDatapoints, false, LogLevel.Debug); if (result.Errors != null) { - var missing = result.Errors.FirstOrDefault( - err => err.Type == ErrorType.ItemMissing - ); + var missing = result.Errors.FirstOrDefault(err => err.Type == ErrorType.ItemMissing); if (missing?.Skipped != null) { - log.LogError( - "Failed to push datapoints to CDF, missing ids: {Ids}", - missing.Skipped.Select(ms => ms.Id) - ); + log.LogError("Failed to push datapoints to CDF, missing ids: {Ids}", missing.Skipped.Select(ms => ms.Id)); foreach (var skipped in missing.Skipped) { missingTimeseries.Add(skipped.Id.ExternalId); @@ -246,15 +180,10 @@ CancellationToken token missingTimeseriesCnt.Set(missing.Skipped.Count()); } - var mismatched = result.Errors.FirstOrDefault( - err => err.Type == ErrorType.MismatchedType - ); + var mismatched = result.Errors.FirstOrDefault(err => err.Type == ErrorType.MismatchedType); if (mismatched?.Skipped != null) { - log.LogError( - "Failed to push datapoints to CDF, mismatched timeseries: {Ids}", - mismatched.Skipped.Select(ms => ms.Id) - ); + log.LogError("Failed to push datapoints to CDF, mismatched timeseries: {Ids}", mismatched.Skipped.Select(ms => ms.Id)); foreach (var skipped in mismatched.Skipped) { mismatchedTimeseries.Add(skipped.Id.ExternalId); @@ -274,18 +203,15 @@ CancellationToken token } } + + result.ThrowOnFatal(); - log.LogDebug( - "Successfully pushed {Real} / {Total} points to CDF", - realCount, - count - ); + log.LogDebug("Successfully pushed {Real} / {Total} points to CDF", realCount, count); dataPointPushes.Inc(); dataPointsCounter.Inc(realCount); - if (realCount == 0) - return null; + if (realCount == 0) return null; } catch (Exception e) { @@ -297,23 +223,18 @@ CancellationToken token return true; } - /// /// Attempts to push the given list of events to CDF. /// /// True if push succeeded, false if it failed, null if there were no events to push. 
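Both push methods above share a tri-state result: true on success, false on failure, and null when there was nothing to push. A minimal caller-side sketch of how that convention is typically consumed; the class and method names here are illustrative assumptions, not part of the extractor:

using System;
using System.Threading.Tasks;

// Hypothetical caller, for illustration only: shows how a bool? push result is read.
public static class PushResultConvention
{
    public static async Task<bool> RunPushAsync(Func<Task<bool?>> push)
    {
        bool? result = await push();
        if (result == null) return true;  // nothing to push; not a failure
        if (!result.Value) return false;  // push failed; caller should buffer and retry later
        return true;                      // push succeeded
    }
}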
public async Task PushEvents(IEnumerable events, CancellationToken token) { - if (events == null) - return null; + if (events == null) return null; var eventList = new List(); int count = 0; foreach (var buffEvent in events) { - if ( - buffEvent.Time < PusherUtils.CogniteMinTime - || buffEvent.Time > PusherUtils.CogniteMaxTime - ) + if (buffEvent.Time < PusherUtils.CogniteMinTime || buffEvent.Time > PusherUtils.CogniteMaxTime) { skippedEvents.Inc(); continue; @@ -322,8 +243,7 @@ CancellationToken token count++; } - if (count == 0) - return null; + if (count == 0) return null; if (fullConfig.DryRun) { @@ -333,40 +253,24 @@ CancellationToken token try { - var result = await destination.EnsureEventsExistsAsync( - eventList - .Select( - evt => evt.ToCDFEvent(Extractor, config.DataSet?.Id, nodeToAssetIds) - ) - .Where(evt => evt != null), - RetryMode.OnError, - SanitationMode.Clean, - token - ); + var result = await destination.EnsureEventsExistsAsync(eventList + .Select(evt => evt.ToCDFEvent(Extractor, config.DataSet?.Id, nodeToAssetIds)) + .Where(evt => evt != null), RetryMode.OnError, SanitationMode.Clean, token); log.LogResult(result, RequestType.CreateEvents, false, LogLevel.Debug); int skipped = 0; if (result.Errors != null) { - skipped = result.Errors.Aggregate( - 0, - (seed, err) => seed + (err.Skipped?.Count() ?? 0) - ); + skipped = result.Errors.Aggregate(0, (seed, err) => + seed + (err.Skipped?.Count() ?? 0)); - var fatalError = result.Errors.FirstOrDefault( - err => err.Type == ErrorType.FatalFailure - ); + var fatalError = result.Errors.FirstOrDefault(err => err.Type == ErrorType.FatalFailure); if (fatalError != null) { - log.LogError( - "Failed to push {NumFailedEvents} events to CDF: {Message}", - count, - fatalError.Exception?.Message - ); + log.LogError("Failed to push {NumFailedEvents} events to CDF: {Message}", count, fatalError.Exception?.Message); eventPushFailures.Inc(); - return fatalError.Exception is ResponseException rex - && (rex.Code == 400 || rex.Code == 409); + return fatalError.Exception is ResponseException rex && (rex.Code == 400 || rex.Code == 409); } } @@ -377,12 +281,7 @@ CancellationToken token } catch (Exception exc) { - log.LogError( - exc, - "Failed to push {NumFailedEvents} events to CDF: {Message}", - count, - exc.Message - ); + log.LogError(exc, "Failed to push {NumFailedEvents} events to CDF: {Message}", count, exc.Message); eventPushFailures.Inc(); return exc is ResponseException rex && (rex.Code == 400 || rex.Code == 409); } @@ -463,43 +362,32 @@ await fdmDestination.PushNodes( var timeseriesMap = MapTimeseries(variables); bool isTimeseriesPushed = true; - if (pushCleanAssets) + if (pushCleanAssets && assetsMap.Any()) { - await PushCleanAssets(assetsMap, update.Variables, report, token); + await PushCleanAssets(assetsMap, update.Objects, report, result, token); } - if (pushCleanTimeseries) - { - isTimeseriesPushed = await PushCleanTimeseries( - timeseriesMap, - update.Variables, - report, - token - ); - } + isTimeseriesPushed = await PushCleanTimeseries( + timeseriesMap, + update.Variables, + report, + result, + token + ); var tasks = new List(); if (isTimeseriesPushed && fdmDestination != null) { - tasks.Add( - Task.Run( - () => - fdmDestination.PushNodes( - objects, - variables, - references, - Extractor, - token - ) - ) - ); + tasks.Add(Task.Run(() => PushFdm(objects, variables, references, result, token))); } - if (!pushCleanAssets) + if (!pushCleanAssets && assetsMap.Any()) { tasks.Add( - Task.Run(() => PushRawAssets(assetsMap, 
update.Variables, report, token)) + Task.Run( + () => PushRawAssets(assetsMap, update.Objects, report, result, token) + ) ); } @@ -507,12 +395,12 @@ await fdmDestination.PushNodes( { tasks.Add( Task.Run( - () => PushRawTimeseries(timeseriesMap, update.Variables, report, token) + () => PushRawTimeseries(timeseriesMap, update.Variables, report, result, token) ) ); } - tasks.Add(Task.Run(() => PushReferences(references, report, token))); + tasks.Add(Task.Run(() => PushReferences(references, report, result, token))); await Task.WhenAll(tasks); @@ -533,9 +421,37 @@ await fdmDestination.PushNodes( return result; } + private async Task PushFdm( + IEnumerable objects, + IEnumerable variables, + IEnumerable references, + PushResult result, + CancellationToken token + ) + { + bool pushResult = true; + try + { + pushResult = await fdmDestination!.PushNodes( + objects, + variables, + references, + Extractor, + token + ); + } + catch + { + pushResult = false; + } + result.Variables = pushResult; + result.Objects = pushResult; + result.References = pushResult; + } + private ConcurrentDictionary MapAssets(IEnumerable objects) { - return new ConcurrentDictionary( + return config.SkipMetadata ? new ConcurrentDictionary() : new ConcurrentDictionary( objects .Where(node => node.Source != NodeSource.CDF) .ToDictionary(obj => Extractor.GetUniqueId(obj.Id)!) @@ -552,6 +468,25 @@ IEnumerable variables } private async Task PushCleanAssets( + ConcurrentDictionary assetsMap, + TypeUpdateConfig update, + BrowseReport report, + PushResult result, + CancellationToken token + ) + { + try + { + await PushCleanAssets(assetsMap, update, report, token); + } + catch + { + result.Objects = false; + } + return result.Objects; + } + + private async Task PushCleanAssets( ConcurrentDictionary assetsMap, TypeUpdateConfig update, BrowseReport report, @@ -564,11 +499,29 @@ CancellationToken token { await UpdateAssets(assetsMap, assets, update, report, token); } - - return true; } private async Task PushCleanTimeseries( + ConcurrentDictionary timeseriesMap, + TypeUpdateConfig update, + BrowseReport report, + PushResult result, + CancellationToken token + ) + { + try + { + await PushCleanTimeseries(timeseriesMap, update, report, token); + } + catch + { + result.Variables = false; + } + + return result.Variables; + } + + private async Task PushCleanTimeseries( ConcurrentDictionary timeseriesMap, TypeUpdateConfig update, BrowseReport report, @@ -590,23 +543,8 @@ CancellationToken token { await UpdateTimeseries(toPushMeta, timeseries, update, report, token); } - - return true; } - // { - // await PushAssets(objects, variables, update.Objects, report, token); - // await CreateTimeseriesMetadata(variables, report, token); - // } - - // private async Task CreateTimeseriesMetadata(IEnumerable variables, BrowseReport report, CancellationToken token) - // { - // var tsMap = new ConcurrentDictionary( - // variables.ToDictionary(ts => ts.GetUniqueId(Extractor))!); - // await CreateTimeseries(tsMap, report, true, token); - // } - - /// /// Reset the pusher, preparing it to be restarted /// @@ -792,6 +730,24 @@ await destination.CogniteClient.Events.ListAsync( /// /// List of references to push /// True if nothing failed unexpectedly + private async Task PushReferences( + IEnumerable references, + BrowseReport report, + PushResult result, + CancellationToken token + ) + { + try + { + await PushReferences(references, report, token); + } + catch (Exception e) + { + log.LogError(e, "Failed to ensure references"); + result.References = false; + } + } 
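The new overloads that take a PushResult all follow the same shape: run the underlying push, and on any exception log it and clear the corresponding flag on the result instead of letting the error propagate. A condensed, self-contained sketch of that pattern; PushGuard and its members are assumptions made for illustration, not the extractor's API:

using System;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;

// Illustrative generalisation of the try/catch wrappers added in this patch.
public sealed class PushGuard
{
    private readonly ILogger log;

    public PushGuard(ILogger log)
    {
        this.log = log;
    }

    // Runs a push delegate; on failure, logs and flips the relevant result flag via the setter.
    public async Task RunAsync(Func<Task> push, Action<bool> setFlag, string what)
    {
        try
        {
            await push();
        }
        catch (Exception ex)
        {
            log.LogError(ex, "Failed to ensure {What}", what);
            setFlag(false);
        }
    }

    // Example use (hypothetical): await guard.RunAsync(() => PushReferences(references, report, token), ok => result.References = ok, "references");
}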
+ private async Task PushReferences( IEnumerable references, BrowseReport report, @@ -864,86 +820,53 @@ public async Task ExecuteDeletes(DeletedNodes deletes, CancellationToken t /// Update list of nodes as assets in CDF Raw. /// /// Id, node map for the assets that should be pushed. - private async Task UpdateRawAssets( - IDictionary assetMap, - BrowseReport report, - CancellationToken token - ) + private async Task UpdateRawAssets(IDictionary assetMap, BrowseReport report, CancellationToken token) { - if (config.RawMetadata?.Database == null || config.RawMetadata?.AssetsTable == null) - return; - await UpsertRawRows( - config.RawMetadata.Database, - config.RawMetadata.AssetsTable, - rows => + if (config.RawMetadata?.Database == null || config.RawMetadata?.AssetsTable == null) return; + await UpsertRawRows(config.RawMetadata.Database, config.RawMetadata.AssetsTable, rows => + { + if (rows == null) { - if (rows == null) - { - return assetMap - .Select( - kvp => - ( - kvp.Key, - update: PusherUtils.CreateRawUpdate( - log, - Extractor.StringConverter, - kvp.Value, - null, - ConverterType.Node - ) - ) - ) - .Where(elem => elem.update != null) - .ToDictionary(pair => pair.Key, pair => pair.update!.Value); - } + return assetMap.Select(kvp => ( + kvp.Key, + update: PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, kvp.Value, null, ConverterType.Node) + )).Where(elem => elem.update != null) + .ToDictionary(pair => pair.Key, pair => pair.update!.Value); + } - var toWrite = - new List<( - string key, - RawRow> row, - BaseUANode node - )>(); + var toWrite = new List<(string key, RawRow> row, BaseUANode node)>(); - foreach (var row in rows) + foreach (var row in rows) + { + if (assetMap.TryGetValue(row.Key, out var ts)) { - if (assetMap.TryGetValue(row.Key, out var ts)) - { - toWrite.Add((row.Key, row, ts)); - assetMap.Remove(row.Key); - } + toWrite.Add((row.Key, row, ts)); + assetMap.Remove(row.Key); } + } - var updates = new Dictionary(); + var updates = new Dictionary(); - foreach (var (key, row, node) in toWrite) - { - var update = PusherUtils.CreateRawUpdate( - log, - Extractor.StringConverter, - node, - row, - ConverterType.Node - ); + foreach (var (key, row, node) in toWrite) + { + var update = PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, node, row, ConverterType.Node); - if (update != null) + if (update != null) + { + updates[key] = update.Value; + if (row == null) { - updates[key] = update.Value; - if (row == null) - { - report.AssetsCreated++; - } - else - { - report.AssetsUpdated++; - } + report.AssetsCreated++; + } + else + { + report.AssetsUpdated++; } } + } - return updates; - }, - null, - token - ); + return updates; + }, null, token); } /// @@ -951,94 +874,45 @@ BaseUANode node /// This does not create rows if they already exist. /// /// Id, node map for the assets that should be pushed. 
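The raw update path above merges every node into the table, while the create path that follows first lists existing row keys and only writes the missing ones. A simplified sketch of that diffing step, using plain dictionaries and System.Text.Json instead of the SDK row types; all names here are illustrative assumptions, not the extractor's API:

using System.Collections.Generic;
using System.Linq;
using System.Text.Json;

// Simplified illustration of the "ensure rows" behaviour: serialise only the keys
// that are not already present in the CDF Raw table, using camelCase property names.
public static class EnsureRowsSketch
{
    private static readonly JsonSerializerOptions options =
        new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase };

    public static IDictionary<string, JsonElement> BuildMissingRows(
        IEnumerable<string> existingKeys,
        IReadOnlyDictionary<string, object> desiredRows)
    {
        var existing = existingKeys.ToHashSet();
        return desiredRows
            .Where(kvp => !existing.Contains(kvp.Key))
            .ToDictionary(
                kvp => kvp.Key,
                kvp => JsonSerializer.SerializeToElement(kvp.Value, options));
    }
}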
- private async Task CreateRawAssets( - IDictionary assetMap, - BrowseReport report, - CancellationToken token - ) + private async Task CreateRawAssets(IDictionary assetMap, BrowseReport report, CancellationToken token) { - if (config.RawMetadata?.Database == null || config.RawMetadata?.AssetsTable == null) - return; + if (config.RawMetadata?.Database == null || config.RawMetadata?.AssetsTable == null) return; - await EnsureRawRows( - config.RawMetadata.Database, - config.RawMetadata.AssetsTable, - assetMap.Keys, - ids => - { - var assets = ids.Select(id => (assetMap[id], id)); - var creates = assets - .Select( - pair => - ( - pair.Item1.ToJson( - log, - Extractor.StringConverter, - ConverterType.Node - ), - pair.id - ) - ) - .Where(pair => pair.Item1 != null) - .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); - report.AssetsCreated += creates.Count; - return creates; - }, - new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, - token - ); + await EnsureRawRows(config.RawMetadata.Database, config.RawMetadata.AssetsTable, assetMap.Keys, ids => + { + var assets = ids.Select(id => (assetMap[id], id)); + var creates = assets.Select(pair => (pair.Item1.ToJson(log, Extractor.StringConverter, ConverterType.Node), pair.id)) + .Where(pair => pair.Item1 != null) + .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); + report.AssetsCreated += creates.Count; + return creates; + }, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, token); } /// /// Create assets in CDF Clean. /// /// Id, node map for the assets that should be pushed. - private async Task> CreateAssets( - IDictionary assetMap, - BrowseReport report, - CancellationToken token - ) + private async Task> CreateAssets(IDictionary assetMap, BrowseReport report, CancellationToken token) { var assets = new List(); - foreach ( - var chunk in Chunking.ChunkByHierarchy( - assetMap.Values, - config.CdfChunking.Assets, - node => node.Id, - node => node.ParentId - ) - ) + foreach (var chunk in Chunking.ChunkByHierarchy(assetMap.Values, config.CdfChunking.Assets, node => node.Id, node => node.ParentId)) { - var assetChunk = await destination.GetOrCreateAssetsAsync( - chunk.Select(node => Extractor.GetUniqueId(node.Id)!), - ids => - { - var assets = ids.Select(id => assetMap[id]); - var creates = assets - .Select( - node => - node.ToCDFAsset( - fullConfig, - Extractor, - config.DataSet?.Id, - config.MetadataMapping?.Assets - ) - ) - .Where(asset => asset != null); - report.AssetsCreated += creates.Count(); - return creates; - }, - RetryMode.None, - SanitationMode.Clean, - token - ); + var assetChunk = await destination.GetOrCreateAssetsAsync(chunk.Select(node => Extractor.GetUniqueId(node.Id)!), ids => + { + var assets = ids.Select(id => assetMap[id]); + var creates = assets + .Select(node => node.ToCDFAsset(fullConfig, Extractor, config.DataSet?.Id, config.MetadataMapping?.Assets)) + .Where(asset => asset != null); + report.AssetsCreated += creates.Count(); + return creates; + }, RetryMode.None, SanitationMode.Clean, token); log.LogResult(assetChunk, RequestType.CreateAssets, true); assetChunk.ThrowOnFatal(); - if (assetChunk.Results == null) - continue; + if (assetChunk.Results == null) continue; foreach (var asset in assetChunk.Results) { @@ -1116,31 +990,35 @@ private async Task PushRawAssets( ConcurrentDictionary assetsMap, TypeUpdateConfig update, BrowseReport report, + PushResult result, CancellationToken token ) { - if (config.SkipMetadata) - return; - - if 
(!assetsMap.Any()) - return; - - var metaMap = config.MetadataMapping?.Assets; - bool useRawAssets = - config.RawMetadata != null - && !string.IsNullOrWhiteSpace(config.RawMetadata.Database) - && !string.IsNullOrWhiteSpace(config.RawMetadata.AssetsTable); + try + { + await PushRawAssets(assetsMap, update, report, token); + } + catch (Exception e) + { + log.LogError(e, "Failed to ensure assets"); + result.Objects = false; + } + } - if (useRawAssets) + private async Task PushRawAssets( + ConcurrentDictionary assetsMap, + TypeUpdateConfig update, + BrowseReport report, + CancellationToken token + ) + { + if (update.AnyUpdate) { - if (update.AnyUpdate) - { - await UpdateRawAssets(assetsMap, report, token); - } - else - { - await CreateRawAssets(assetsMap, report, token); - } + await UpdateRawAssets(assetsMap, report, token); + } + else + { + await CreateRawAssets(assetsMap, report, token); } } @@ -1466,6 +1344,24 @@ CancellationToken token /// /// Timeseries to push /// Configuration for which fields, if any, to update in CDF + private async Task PushRawTimeseries( + ConcurrentDictionary tsIds, + TypeUpdateConfig update, + BrowseReport report, + PushResult result, + CancellationToken token + ) + { + try + { + await PushRawTimeseries(tsIds, update, report, token); + } + catch + { + result.Variables = false; + } + } + private async Task PushRawTimeseries( ConcurrentDictionary tsIds, TypeUpdateConfig update, diff --git a/MQTTCDFBridge/MQTTCDFBridge.csproj b/MQTTCDFBridge/MQTTCDFBridge.csproj index 42044042..95bcc63f 100644 --- a/MQTTCDFBridge/MQTTCDFBridge.csproj +++ b/MQTTCDFBridge/MQTTCDFBridge.csproj @@ -11,7 +11,7 @@ - + diff --git a/Test/Test.csproj b/Test/Test.csproj index 655f4e61..5c84bd10 100644 --- a/Test/Test.csproj +++ b/Test/Test.csproj @@ -32,7 +32,7 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive - + all diff --git a/Test/Unit/DeleteTest.cs b/Test/Unit/DeleteTest.cs index c4f1acd2..4b5498ba 100644 --- a/Test/Unit/DeleteTest.cs +++ b/Test/Unit/DeleteTest.cs @@ -103,12 +103,15 @@ public Task StoreExtractionState(IEnumerable extractionStates, string tabl public class DeleteTest { private readonly StaticServerTestFixture tester; + private readonly ITestOutputHelper _output; + public DeleteTest(ITestOutputHelper output, StaticServerTestFixture tester) { this.tester = tester ?? 
throw new ArgumentNullException(nameof(tester)); tester.ResetConfig(); tester.Init(output); tester.Client.TypeManager.Reset(); + _output = output; } private static UAObject GetObject(string id) @@ -444,6 +447,7 @@ public async Task TestCDFDeleteRaw() await extractor.RunExtractor(true); Assert.True(handler.AssetRaw.ContainsKey(addedExtId)); Assert.True(handler.TimeseriesRaw.ContainsKey(addedVarExtId)); + handler.Timeseries.Values.ToList().ForEach(v => _output.WriteLine(v.ToString())); Assert.True(handler.Timeseries.ContainsKey(addedVarExtId)); Assert.False(handler.AssetRaw[addedExtId].TryGetProperty("deleted", out _)); Assert.False(handler.TimeseriesRaw[addedVarExtId].TryGetProperty("deleted", out _)); From fa3bd31efe92f5be28ad335c4dee23b236c30e9a Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Thu, 29 Jun 2023 13:40:23 +0200 Subject: [PATCH 03/26] test: fix last failing test --- Extractor/Pushers/CDFPusher.cs | 52 +++++++++------------------------- Test/Unit/CDFPusherTest.cs | 2 ++ 2 files changed, 15 insertions(+), 39 deletions(-) diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index 45c0b79c..4e386293 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -379,28 +379,20 @@ await fdmDestination.PushNodes( if (isTimeseriesPushed && fdmDestination != null) { - tasks.Add(Task.Run(() => PushFdm(objects, variables, references, result, token))); + tasks.Add(PushFdm(objects, variables, references, result, token)); } if (!pushCleanAssets && assetsMap.Any()) { - tasks.Add( - Task.Run( - () => PushRawAssets(assetsMap, update.Objects, report, result, token) - ) - ); + tasks.Add(PushRawAssets(assetsMap, update.Objects, report, result, token)); } if (!pushCleanTimeseries) { - tasks.Add( - Task.Run( - () => PushRawTimeseries(timeseriesMap, update.Variables, report, result, token) - ) - ); + tasks.Add(PushRawTimeseries(timeseriesMap, update.Variables, report, result, token)); } - tasks.Add(Task.Run(() => PushReferences(references, report, result, token))); + tasks.Add(PushReferences(references, report, result, token)); await Task.WhenAll(tasks); @@ -531,7 +523,7 @@ CancellationToken token var timeseries = await CreateTimeseries( timeseriesMap, report, - config.SkipMetadata, + !pushCleanTimeseries || config.SkipMetadata, token ); @@ -539,7 +531,7 @@ CancellationToken token .Where(kvp => kvp.Value.Source != NodeSource.CDF) .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); - if (update.AnyUpdate && toPushMeta.Any()) + if (update.AnyUpdate && toPushMeta.Any() && pushCleanTimeseries) { await UpdateTimeseries(toPushMeta, timeseries, update, report, token); } @@ -1292,8 +1284,7 @@ private async Task UpdateTimeseries( IEnumerable timeseries, TypeUpdateConfig update, BrowseReport report, - CancellationToken token - ) + CancellationToken token) { var updates = new List(); var existing = timeseries.ToDictionary(asset => asset.ExternalId); @@ -1301,22 +1292,10 @@ CancellationToken token { if (existing.TryGetValue(kvp.Key, out var ts)) { - var tsUpdate = PusherUtils.GetTSUpdate( - fullConfig, - Extractor, - ts, - kvp.Value, - update, - nodeToAssetIds - ); - if (tsUpdate == null) - continue; - if ( - tsUpdate.AssetId != null - || tsUpdate.Description != null - || tsUpdate.Name != null - || tsUpdate.Metadata != null - ) + var tsUpdate = PusherUtils.GetTSUpdate(fullConfig, Extractor, ts, kvp.Value, update, nodeToAssetIds); + if (tsUpdate == null) continue; + if (tsUpdate.AssetId != null || tsUpdate.Description != null + || tsUpdate.Name != null || 
tsUpdate.Metadata != null) { updates.Add(new TimeSeriesUpdateItem(ts.ExternalId) { Update = tsUpdate }); } @@ -1325,12 +1304,7 @@ CancellationToken token if (updates.Any()) { - var res = await destination.UpdateTimeSeriesAsync( - updates, - RetryMode.OnError, - SanitationMode.Clean, - token - ); + var res = await destination.UpdateTimeSeriesAsync(updates, RetryMode.OnError, SanitationMode.Clean, token); log.LogResult(res, RequestType.UpdateTimeSeries, false); res.ThrowOnFatal(); @@ -1373,7 +1347,7 @@ CancellationToken token .Where(kvp => kvp.Value.Source != NodeSource.CDF) .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); - if (update.AnyUpdate) + if (update.AnyUpdate && !config.SkipMetadata) { await UpdateRawTimeseries(toPushMeta, report, token); } diff --git a/Test/Unit/CDFPusherTest.cs b/Test/Unit/CDFPusherTest.cs index f301d52e..dfc3e508 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -34,6 +34,7 @@ public CDFPusherTestFixture() : base() public sealed class CDFPusherTest : IClassFixture, IDisposable { private readonly CDFPusherTestFixture tester; + private readonly ITestOutputHelper _output; private CDFMockHandler handler; private CDFPusher pusher; public CDFPusherTest(ITestOutputHelper output, CDFPusherTestFixture tester) @@ -43,6 +44,7 @@ public CDFPusherTest(ITestOutputHelper output, CDFPusherTestFixture tester) tester.ResetConfig(); (handler, pusher) = tester.GetCDFPusher(); tester.Client.TypeManager.Reset(); + _output = output; } public void Dispose() From 455d5a390a3f69871b13180b79f911f3c1ca8016 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Tue, 4 Jul 2023 12:07:14 +0200 Subject: [PATCH 04/26] feat: segregation --- Extractor/Pushers/CDFPusher.cs | 153 +++------ Extractor/Pushers/Writers/AssetsWriter.cs | 143 ++++++++ .../Writers/Interfaces/IAssetsWriter.cs | 19 ++ .../Pushers/Writers/Interfaces/IRawWriter.cs | 37 ++ .../Interfaces/IRelationshipsWriter.cs | 14 + .../Writers/Interfaces/ITimeseriesWriter.cs | 21 ++ Extractor/Pushers/Writers/RawWriter.cs | 317 ++++++++++++++++++ .../Pushers/Writers/RelationshipsWriter.cs | 28 ++ Extractor/Pushers/Writers/TimeseriesWriter.cs | 179 ++++++++++ Extractor/Pushers/Writers/WriterUtils.cs | 40 +++ ExtractorLauncher/ExtractorStarter.cs | 3 + Test/Utils/BaseExtractorTestFixture.cs | 2 + config/opc.ua.net.extractor.Config.xml | 4 +- 13 files changed, 857 insertions(+), 103 deletions(-) create mode 100644 Extractor/Pushers/Writers/AssetsWriter.cs create mode 100644 Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs create mode 100644 Extractor/Pushers/Writers/Interfaces/IRawWriter.cs create mode 100644 Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs create mode 100644 Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs create mode 100644 Extractor/Pushers/Writers/RawWriter.cs create mode 100644 Extractor/Pushers/Writers/RelationshipsWriter.cs create mode 100644 Extractor/Pushers/Writers/TimeseriesWriter.cs create mode 100644 Extractor/Pushers/Writers/WriterUtils.cs diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index 4e386293..13e40130 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -23,6 +23,7 @@ You should have received a copy of the GNU General Public License using Cognite.OpcUa.Nodes; using Cognite.OpcUa.NodeSources; using Cognite.OpcUa.Pushers.FDM; +using Cognite.OpcUa.Pushers.Writers.Interfaces; using Cognite.OpcUa.Types; using CogniteSdk; using Microsoft.Extensions.DependencyInjection; @@ -47,6 +48,9 @@ public sealed 
class CDFPusher : IPusher { private readonly CognitePusherConfig config; private readonly FullConfig fullConfig; + private readonly IRawWriter rawWriter; + private readonly ITimeseriesWriter timeseriesWriter; + private readonly IAssetsWriter assetsWriter; private readonly IDictionary nodeToAssetIds = new Dictionary(); public bool DataFailing { get; set; } @@ -93,6 +97,10 @@ public CDFPusher( BaseConfig = config; this.destination = destination; this.fullConfig = fullConfig; + // rawWriter = provider.GetRequiredService(); + // timeseriesWriter = provider.GetRequiredService(); + // assetsWriter = provider.GetRequiredService(); + Console.WriteLine("testing"); if (config.BrowseCallback != null && (config.BrowseCallback.Id.HasValue || !string.IsNullOrEmpty(config.BrowseCallback.ExternalId))) { callback = new BrowseCallback(destination, config.BrowseCallback, log); @@ -1069,87 +1077,53 @@ await MarkRawRowsAsDeleted( /// Update list of nodes as timeseries in CDF Raw. /// /// Id, node map for the timeseries that should be pushed. - private async Task UpdateRawTimeseries( - IDictionary tsMap, - BrowseReport report, - CancellationToken token - ) + private async Task UpdateRawTimeseries(IDictionary tsMap, BrowseReport report, CancellationToken token) { - if (config.RawMetadata?.Database == null || config.RawMetadata.TimeseriesTable == null) - return; + if (config.RawMetadata?.Database == null || config.RawMetadata.TimeseriesTable == null) return; + await UpsertRawRows(config.RawMetadata.Database, config.RawMetadata.TimeseriesTable, rows => + { + if (rows == null) + { + return tsMap.Select(kvp => ( + kvp.Key, + update: PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, kvp.Value, null, ConverterType.Variable) + )).Where(elem => elem.update != null) + .ToDictionary(pair => pair.Key, pair => pair.update!.Value); + } - await UpsertRawRows( - config.RawMetadata.Database, - config.RawMetadata.TimeseriesTable, - rows => + var toWrite = new List<(string key, RawRow> row, UAVariable node)>(); + + foreach (var row in rows) { - if (rows == null) + if (tsMap.TryGetValue(row.Key, out var ts)) { - return tsMap - .Select( - kvp => - ( - kvp.Key, - update: PusherUtils.CreateRawUpdate( - log, - Extractor.StringConverter, - kvp.Value, - null, - ConverterType.Variable - ) - ) - ) - .Where(elem => elem.update != null) - .ToDictionary(pair => pair.Key, pair => pair.update!.Value); + toWrite.Add((row.Key, row, ts)); + tsMap.Remove(row.Key); } + } + + var updates = new Dictionary(); - var toWrite = - new List<( - string key, - RawRow> row, - UAVariable node - )>(); + foreach (var (key, row, node) in toWrite) + { + var update = PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, node, row, ConverterType.Variable); - foreach (var row in rows) + if (update != null) { - if (tsMap.TryGetValue(row.Key, out var ts)) + updates[key] = update.Value; + if (row == null) { - toWrite.Add((row.Key, row, ts)); - tsMap.Remove(row.Key); + report.TimeSeriesCreated++; } - } - - var updates = new Dictionary(); - - foreach (var (key, row, node) in toWrite) - { - var update = PusherUtils.CreateRawUpdate( - log, - Extractor.StringConverter, - node, - row, - ConverterType.Variable - ); - - if (update != null) + else { - updates[key] = update.Value; - if (row == null) - { - report.TimeSeriesCreated++; - } - else - { - report.TimeSeriesUpdated++; - } + report.TimeSeriesUpdated++; } } + } - return updates; - }, - null, - token - ); + return updates; + }, null, token); } /// @@ -1157,43 +1131,20 @@ UAVariable node /// This does not 
create rows if they already exist. /// /// Id, node map for the timeseries that should be pushed. - private async Task CreateRawTimeseries( - IDictionary tsMap, - BrowseReport report, - CancellationToken token - ) + private async Task CreateRawTimeseries(IDictionary tsMap, BrowseReport report, CancellationToken token) { - if (config.RawMetadata?.Database == null || config.RawMetadata.TimeseriesTable == null) - return; + if (config.RawMetadata?.Database == null || config.RawMetadata.TimeseriesTable == null) return; - await EnsureRawRows( - config.RawMetadata.Database, - config.RawMetadata.TimeseriesTable, - tsMap.Keys, - ids => - { - var timeseries = ids.Select(id => (tsMap[id], id)); - var creates = timeseries - .Select( - pair => - ( - pair.Item1.ToJson( - log, - Extractor.StringConverter, - ConverterType.Variable - ), - pair.id - ) - ) - .Where(pair => pair.Item1 != null) - .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); + await EnsureRawRows(config.RawMetadata.Database, config.RawMetadata.TimeseriesTable, tsMap.Keys, ids => + { + var timeseries = ids.Select(id => (tsMap[id], id)); + var creates = timeseries.Select(pair => (pair.Item1.ToJson(log, Extractor.StringConverter, ConverterType.Variable), pair.id)) + .Where(pair => pair.Item1 != null) + .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); - report.TimeSeriesCreated += creates.Count; - return creates; - }, - new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, - token - ); + report.TimeSeriesCreated += creates.Count; + return creates; + }, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, token); } /// diff --git a/Extractor/Pushers/Writers/AssetsWriter.cs b/Extractor/Pushers/Writers/AssetsWriter.cs new file mode 100644 index 00000000..afd6a05d --- /dev/null +++ b/Extractor/Pushers/Writers/AssetsWriter.cs @@ -0,0 +1,143 @@ +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Cognite.Extensions; +using Cognite.Extractor.Common; +using Cognite.Extractor.Utils; +using Cognite.OpcUa.Config; +using Cognite.OpcUa.Nodes; +using Cognite.OpcUa.Pushers.Writers.Interfaces; +using CogniteSdk; +using Microsoft.Extensions.Logging; +using Opc.Ua; + +namespace Cognite.OpcUa.Pushers.Writers +{ + public class AssetsWriter : IAssetsWriter + { + private readonly ILogger log; + private readonly FullConfig config; + private readonly CogniteDestination destination; + private readonly CancellationToken token; + private readonly UAExtractor extractor; + + public AssetsWriter( + ILogger logger, + CancellationToken token, + CogniteDestination destination, + FullConfig config, + UAExtractor extractor) + { + this.log = logger; + this.config = config; + this.destination = destination; + this.token = token; + this.extractor = extractor; + } + + public async Task PushNodes( + ConcurrentDictionary nodes, + IDictionary nodeToAssetIds, + TypeUpdateConfig update, + BrowseReport report + ) + { + var assets = await CreateAssets(nodes, nodeToAssetIds, report); + + if (update.AnyUpdate) + { + await UpdateAssets(nodes, assets, update, report); + } + } + + private async Task> CreateAssets( + IDictionary assetMap, + IDictionary nodeToAssetIds, + BrowseReport report) + { + var assets = new List(); + var maxSize = config.Cognite?.CdfChunking.Assets ?? 
1000; + foreach (var chunk in Chunking.ChunkByHierarchy(assetMap.Values, maxSize, node => node.Id, node => node.ParentId)) + { + var assetChunk = await destination.GetOrCreateAssetsAsync(chunk.Select(node => extractor.GetUniqueId(node.Id)!), ids => + { + var assets = ids.Select(id => assetMap[id]); + var creates = assets + .Select(node => node.ToCDFAsset( + config, + extractor, + config.Cognite?.DataSet?.Id, + config.Cognite?.MetadataMapping?.Assets)) + .Where(asset => asset != null); + report.AssetsCreated += creates.Count(); + return creates; + }, RetryMode.None, SanitationMode.Clean, token); + + log.LogResult(assetChunk, RequestType.CreateAssets, true); + + assetChunk.ThrowOnFatal(); + + if (assetChunk.Results == null) continue; + + foreach (var asset in assetChunk.Results) + { + nodeToAssetIds[assetMap[asset.ExternalId].Id] = asset.Id; + } + assets.AddRange(assetChunk.Results); + } + return assets; + } + + private async Task UpdateAssets( + IDictionary assetMap, + IEnumerable assets, + TypeUpdateConfig update, + BrowseReport report + ) + { + var updates = new List(); + var existing = assets.ToDictionary(asset => asset.ExternalId); + foreach (var kvp in assetMap) + { + if (existing.TryGetValue(kvp.Key, out var asset)) + { + var assetUpdate = PusherUtils.GetAssetUpdate( + config, + asset, + kvp.Value, + extractor, + update + ); + + if (assetUpdate == null) + continue; + if ( + assetUpdate.ParentExternalId != null + || assetUpdate.Description != null + || assetUpdate.Name != null + || assetUpdate.Metadata != null + ) + { + updates.Add(new AssetUpdateItem(asset.ExternalId) { Update = assetUpdate }); + } + } + } + if (updates.Any()) + { + var res = await destination.UpdateAssetsAsync( + updates, + RetryMode.OnError, + SanitationMode.Clean, + token + ); + + log.LogResult(res, RequestType.UpdateAssets, false); + + res.ThrowOnFatal(); + + report.AssetsUpdated += res.Results?.Count() ?? 0; + } + } } +} diff --git a/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs b/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs new file mode 100644 index 00000000..6b03a057 --- /dev/null +++ b/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs @@ -0,0 +1,19 @@ +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Threading.Tasks; +using Cognite.OpcUa.Config; +using Cognite.OpcUa.Nodes; +using Opc.Ua; + +namespace Cognite.OpcUa.Pushers.Writers.Interfaces +{ + public interface IAssetsWriter + { + Task PushNodes( + ConcurrentDictionary assetMap, + IDictionary nodeToAssetIds, + TypeUpdateConfig config, + BrowseReport report + ); + } +} diff --git a/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs new file mode 100644 index 00000000..182b1167 --- /dev/null +++ b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs @@ -0,0 +1,37 @@ +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Cognite.OpcUa.Nodes; +using CogniteSdk; + +namespace Cognite.OpcUa.Pushers.Writers.Interfaces +{ + public interface IRawWriter + { + static JsonSerializerOptions options => + new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; + + Task>>> GetRawRows( + string dbName, + string tableName, + IEnumerable? 
columns + ); + + Task PushNodes( + string database, + string table, + ConcurrentDictionary rows, + bool shouldUpdate, + BrowseReport report + ) + where T : BaseUANode; + + Task PushReferences( + string database, + string table, + IEnumerable relationships, + BrowseReport report + ); + } +} diff --git a/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs b/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs new file mode 100644 index 00000000..22246795 --- /dev/null +++ b/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs @@ -0,0 +1,14 @@ +using System.Collections.Generic; +using System.Threading.Tasks; +using Cognite.OpcUa.Config; +using Cognite.OpcUa.Types; + +namespace Cognite.OpcUa.Pushers.Destinations.Interfaces +{ + public interface IRelationshipsWriter + { + FullConfig config { get; } + + Task PushReferences(IEnumerable references, BrowseReport report); + } +} diff --git a/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs new file mode 100644 index 00000000..4b859de8 --- /dev/null +++ b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs @@ -0,0 +1,21 @@ +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Threading.Tasks; +using Cognite.OpcUa.Config; +using Cognite.OpcUa.Nodes; +using Opc.Ua; + +namespace Cognite.OpcUa.Pushers.Writers.Interfaces +{ + public interface ITimeseriesWriter + { + Task PushVariables( + ConcurrentDictionary timeseriesMap, + IDictionary nodeToAssetIds, + HashSet mismatchedTimeseries, + TypeUpdateConfig update, + BrowseReport report + ) +; + } +} diff --git a/Extractor/Pushers/Writers/RawWriter.cs b/Extractor/Pushers/Writers/RawWriter.cs new file mode 100644 index 00000000..9a549ecb --- /dev/null +++ b/Extractor/Pushers/Writers/RawWriter.cs @@ -0,0 +1,317 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Cognite.Extractor.Utils; +using Cognite.OpcUa.Config; +using Cognite.OpcUa.Nodes; +using Cognite.OpcUa.Pushers.Writers.Interfaces; +using Cognite.OpcUa.Types; +using CogniteSdk; +using Microsoft.Extensions.Logging; + +namespace Cognite.OpcUa.Pushers.Writers +{ + public class RawWriter : IRawWriter + { + private readonly ILogger log; + + private CancellationToken token { get; } + + private FullConfig config { get; } + private UAExtractor Extractor { get; } + private CogniteDestination destination { get; } + + public RawWriter( + ILogger log, + CancellationToken token, + CogniteDestination destination, + FullConfig config, + UAExtractor extractor + ) + { + this.log = log; + this.token = token; + this.config = config; + this.Extractor = extractor; + this.destination = destination; + } + + public async Task>>> GetRawRows( + string dbName, + string tableName, + IEnumerable? columns + ) + { + string? 
cursor = null; + var rows = new List>>(); + do + { + try + { + var result = await destination.CogniteClient.Raw.ListRowsAsync< + Dictionary + >( + dbName, + tableName, + new RawRowQuery + { + Cursor = cursor, + Limit = 10_000, + Columns = columns + }, + null, + token + ); + rows.AddRange(result.Items); + cursor = result.NextCursor; + } + catch (ResponseException ex) when (ex.Code == 404) + { + log.LogWarning("Table or database not found: {Message}", ex.Message); + break; + } + } while (cursor != null); + return rows; + } + + public async Task PushNodes( + string database, + string table, + ConcurrentDictionary rows, + bool shouldUpdate, + BrowseReport report + ) + where T : BaseUANode + { + if (shouldUpdate) + { + await UpdateRawAssets(database, table, rows, report); + } + else + { + await CreateRawAssets(database, table, rows, report); + } + } + + private async Task UpdateRawAssets( + string database, + string table, + IDictionary dataSet, + BrowseReport report + ) + where T : BaseUANode + { + if (database == null || table == null) + return; + await UpsertRawRows( + database, + table, + rows => + { + if (rows == null) + { + return dataSet + .Select( + kvp => + ( + kvp.Key, + update: PusherUtils.CreateRawUpdate( + log, + Extractor.StringConverter, + kvp.Value, + null, + ConverterType.Node + ) + ) + ) + .Where(elem => elem.update != null) + .ToDictionary(pair => pair.Key, pair => pair.update!.Value); + } + + var toWrite = + new List<( + string key, + RawRow> row, + T node + )>(); + + foreach (var row in rows) + { + if (dataSet.TryGetValue(row.Key, out var ts)) + { + toWrite.Add((row.Key, row, ts)); + dataSet.Remove(row.Key); + } + } + + var updates = new Dictionary(); + + foreach (var (key, row, node) in toWrite) + { + var update = PusherUtils.CreateRawUpdate( + log, + Extractor.StringConverter, + node, + row, + ConverterType.Node + ); + + if (update != null) + { + updates[key] = update.Value; + if (row == null) + { + report.AssetsCreated++; + } + else + { + report.AssetsUpdated++; + } + } + } + + return updates; + }, + null, + token + ); + } + + private async Task CreateRawAssets( + string database, + string table, + IDictionary assetMap, + BrowseReport report + ) + where T : BaseUANode + { + if (database == null || table == null) + return; + + await EnsureRawRows( + database, + table, + assetMap.Keys, + ids => + { + var assets = ids.Select(id => (assetMap[id], id)); + var creates = assets + .Select( + pair => + ( + pair.Item1.ToJson( + log, + Extractor.StringConverter, + ConverterType.Node + ), + pair.id + ) + ) + .Where(pair => pair.Item1 != null) + .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); + report.AssetsCreated += creates.Count; + return creates; + }, + new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase } + ); + } + + private async Task UpsertRawRows( + string dbName, + string tableName, + Func< + IEnumerable>>?, + IDictionary + > dtoBuilder, + JsonSerializerOptions? options, + CancellationToken token + ) + { + int count = 0; + async Task CallAndCreate(IEnumerable>>? rows) + { + var toUpsert = dtoBuilder(rows); + count += toUpsert.Count; + await destination.InsertRawRowsAsync(dbName, tableName, toUpsert, options, token); + } + + string? 
cursor = null; + do + { + try + { + var result = await destination.CogniteClient.Raw.ListRowsAsync< + Dictionary + >( + dbName, + tableName, + new RawRowQuery { Cursor = cursor, Limit = 10_000 }, + null, + token + ); + cursor = result.NextCursor; + + await CallAndCreate(result.Items); + } + catch (ResponseException ex) when (ex.Code == 404) + { + log.LogWarning("Table or database not found: {Message}", ex.Message); + break; + } + } while (cursor != null); + + await CallAndCreate(null); + + log.LogInformation("Updated or created {Count} rows in CDF Raw", count); + } + + private async Task EnsureRawRows( + string dbName, + string tableName, + IEnumerable keys, + Func, IDictionary> dtoBuilder, + JsonSerializerOptions options + ) + { + var rows = await GetRawRows(dbName, tableName, new[] { "," }); + var existing = rows.Select(row => row.Key); + + var toCreate = keys.Except(existing); + if (!toCreate.Any()) + return; + log.LogInformation("Creating {Count} raw rows in CDF", toCreate.Count()); + + var createDtos = dtoBuilder(toCreate); + + await destination.InsertRawRowsAsync(dbName, tableName, createDtos, options, token); + } + + public async Task PushReferences( + string database, + string table, + IEnumerable relationships, + BrowseReport report + ) + { + await EnsureRawRows( + database, + table, + relationships.Select(rel => rel.ExternalId), + ids => + { + var idSet = ids.ToHashSet(); + var creates = relationships + .Where(rel => idSet.Contains(rel.ExternalId)) + .ToDictionary(rel => rel.ExternalId); + report.RelationshipsCreated += creates.Count; + return creates; + }, + new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase } + ); + } + } +} diff --git a/Extractor/Pushers/Writers/RelationshipsWriter.cs b/Extractor/Pushers/Writers/RelationshipsWriter.cs new file mode 100644 index 00000000..4641b5fa --- /dev/null +++ b/Extractor/Pushers/Writers/RelationshipsWriter.cs @@ -0,0 +1,28 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Cognite.OpcUa.Config; +using Cognite.OpcUa.Pushers.Destinations.Interfaces; +using Cognite.OpcUa.Types; +using Microsoft.Extensions.Logging; + +namespace Cognite.OpcUa.Pushers.Destinations +{ + public class RelationshipsWriter : IRelationshipsWriter + { + private ILogger _logger; + private FullConfig _config; + + public FullConfig config => _config; + + public RelationshipsWriter(ILogger logger, dynamic config, CancellationToken token) { + _logger = logger; + _config = config; + } + + public Task PushReferences(IEnumerable references, BrowseReport report) + { + throw new System.NotImplementedException(); + } + } +} diff --git a/Extractor/Pushers/Writers/TimeseriesWriter.cs b/Extractor/Pushers/Writers/TimeseriesWriter.cs new file mode 100644 index 00000000..5d8ef016 --- /dev/null +++ b/Extractor/Pushers/Writers/TimeseriesWriter.cs @@ -0,0 +1,179 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Cognite.Extensions; +using Cognite.Extractor.Utils; +using Cognite.OpcUa.Config; +using Cognite.OpcUa.Nodes; +using Cognite.OpcUa.NodeSources; +using Cognite.OpcUa.Pushers.Writers.Interfaces; +using CogniteSdk; +using Microsoft.Extensions.Logging; +using Opc.Ua; + +namespace Cognite.OpcUa.Pushers.Writers +{ + public class TimeseriesWriter : ITimeseriesWriter + { + private ILogger log; + private readonly FullConfig config; + private readonly CogniteDestination destination; + private 
readonly CancellationToken token; + private readonly UAExtractor extractor; + private bool pushCleanTimeseries => + string.IsNullOrWhiteSpace(config.Cognite?.RawMetadata?.Database) + && string.IsNullOrWhiteSpace(config.Cognite?.RawMetadata?.TimeseriesTable); + + public TimeseriesWriter( + ILogger logger, + CancellationToken token, + CogniteDestination destination, + FullConfig config, + UAExtractor extractor) + { + this.log = logger; + this.config = config; + this.destination = destination; + this.token = token; + this.extractor = extractor; + } + + + public async Task PushVariables( + ConcurrentDictionary timeseriesMap, + IDictionary nodeToAssetIds, + HashSet mismatchedTimeseries, + TypeUpdateConfig update, + BrowseReport report + ) + { + var skipMeta = config.Cognite?.SkipMetadata; + var timeseries = await CreateTimeseries( + timeseriesMap, + nodeToAssetIds, + mismatchedTimeseries, + report, + !pushCleanTimeseries && skipMeta.HasValue ? skipMeta.Value : false + ); + + var toPushMeta = timeseriesMap + .Where(kvp => kvp.Value.Source != NodeSource.CDF) + .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); + + if (update.AnyUpdate && toPushMeta.Any() && pushCleanTimeseries) + { + await UpdateTimeseries(toPushMeta, timeseries, nodeToAssetIds, update, report); + } + } + + private async Task> CreateTimeseries( + IDictionary tsMap, + IDictionary nodeToAssetIds, + HashSet mismatchedTimeseries, + BrowseReport report, + bool createMinimalTimeseries + ) + { + var timeseries = await destination.GetOrCreateTimeSeriesAsync( + tsMap.Keys, + ids => + { + var tss = ids.Select(id => tsMap[id]); + var creates = tss.Select( + ts => + ts.ToTimeseries( + config, + extractor, + extractor, + config.Cognite?.DataSet?.Id, + nodeToAssetIds, + config.Cognite?.MetadataMapping?.Timeseries, + createMinimalTimeseries + ) + ) + .Where(ts => ts != null); + if (createMinimalTimeseries) + { + report.MinimalTimeSeriesCreated += creates.Count(); + } + else + { + report.TimeSeriesCreated += creates.Count(); + } + return creates; + }, + RetryMode.None, + SanitationMode.Clean, + token + ); + + log.LogResult(timeseries, RequestType.CreateTimeSeries, true); + + timeseries.ThrowOnFatal(); + + if (timeseries.Results == null) + return Array.Empty(); + + var foundBadTimeseries = new List(); + foreach (var ts in timeseries.Results) + { + var loc = tsMap[ts.ExternalId]; + if (nodeToAssetIds.TryGetValue(loc.ParentId, out var parentId)) + { + nodeToAssetIds[loc.Id] = parentId; + } + if (ts.IsString != loc.FullAttributes.DataType.IsString) + { + mismatchedTimeseries.Add(ts.ExternalId); + foundBadTimeseries.Add(ts.ExternalId); + } + } + if (foundBadTimeseries.Any()) + { + log.LogDebug( + "Found mismatched timeseries when ensuring: {TimeSeries}", + string.Join(", ", foundBadTimeseries) + ); + } + + return timeseries.Results; + } + + private async Task UpdateTimeseries( + IDictionary tsMap, + IEnumerable timeseries, + IDictionary nodeToAssetIds, + TypeUpdateConfig update, + BrowseReport report) + { + var updates = new List(); + var existing = timeseries.ToDictionary(asset => asset.ExternalId); + foreach (var kvp in tsMap) + { + if (existing.TryGetValue(kvp.Key, out var ts)) + { + var tsUpdate = PusherUtils.GetTSUpdate(config, extractor, ts, kvp.Value, update, nodeToAssetIds); + if (tsUpdate == null) continue; + if (tsUpdate.AssetId != null || tsUpdate.Description != null + || tsUpdate.Name != null || tsUpdate.Metadata != null) + { + updates.Add(new TimeSeriesUpdateItem(ts.ExternalId) { Update = tsUpdate }); + } + } + } + + if (updates.Any()) + { 
+ var res = await destination.UpdateTimeSeriesAsync(updates, RetryMode.OnError, SanitationMode.Clean, token); + + log.LogResult(res, RequestType.UpdateTimeSeries, false); + res.ThrowOnFatal(); + + report.TimeSeriesUpdated += res.Results?.Count() ?? 0; + } + } + } +} diff --git a/Extractor/Pushers/Writers/WriterUtils.cs b/Extractor/Pushers/Writers/WriterUtils.cs new file mode 100644 index 00000000..5dbf14e0 --- /dev/null +++ b/Extractor/Pushers/Writers/WriterUtils.cs @@ -0,0 +1,40 @@ +using System.Threading; +using Cognite.Extractor.Utils; +using Cognite.OpcUa.Config; +using Cognite.OpcUa.Pushers.Writers.Interfaces; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; + +namespace Cognite.OpcUa.Pushers.Writers +{ + public static class WriterUtils + { + public static void AddWriters(this IServiceCollection services, CancellationToken token) + { + services.AddSingleton(provider => + { + var config = provider.GetRequiredService(); + var logger = provider.GetRequiredService>(); + var dest = provider.GetRequiredService(); + var extractor = provider.GetRequiredService(); + return new AssetsWriter(logger, token, dest, config, extractor); + }); + services.AddSingleton(provider => + { + var config = provider.GetRequiredService(); + var logger = provider.GetRequiredService>(); + var dest = provider.GetRequiredService(); + var extractor = provider.GetRequiredService(); + return new RawWriter(logger, token, dest, config, extractor); + }); + services.AddSingleton(provider => + { + var config = provider.GetRequiredService(); + var logger = provider.GetRequiredService>(); + var dest = provider.GetRequiredService(); + var extractor = provider.GetRequiredService(); + return new TimeseriesWriter(logger, token, dest, config, extractor); + }); + } + } +} diff --git a/ExtractorLauncher/ExtractorStarter.cs b/ExtractorLauncher/ExtractorStarter.cs index a4ef83da..e1492f6d 100644 --- a/ExtractorLauncher/ExtractorStarter.cs +++ b/ExtractorLauncher/ExtractorStarter.cs @@ -21,6 +21,8 @@ You should have received a copy of the GNU General Public License using Cognite.Extractor.Utils; using Cognite.OpcUa.Config; using Cognite.OpcUa.Pushers; +using Cognite.OpcUa.Pushers.Writers; +using Cognite.OpcUa.Pushers.Writers.Interfaces; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Prometheus; @@ -323,6 +325,7 @@ public static async Task RunExtractor(ILogger? 
log, ExtractorParams setup, Servi }); services.AddSingleton(); + services.AddWriters(token); var options = new ExtractorRunnerParams { diff --git a/Test/Utils/BaseExtractorTestFixture.cs b/Test/Utils/BaseExtractorTestFixture.cs index c0949e99..f776ff1e 100644 --- a/Test/Utils/BaseExtractorTestFixture.cs +++ b/Test/Utils/BaseExtractorTestFixture.cs @@ -1,4 +1,5 @@ using AdysTech.InfluxDB.Client.Net; +using Cognite.OpcUa.Pushers.Writers; using Cognite.Extractor.Configuration; using Cognite.Extractor.StateStorage; using Cognite.Extractor.Testing; @@ -166,6 +167,7 @@ public async Task ClearLiteDB(InfluxDBClient client) Services.AddCogniteClient("appid", null, true, true, false); var provider = Services.BuildServiceProvider(); var destination = provider.GetRequiredService(); + Services.AddWriters(Source.Token); var pusher = new CDFPusher(Provider.GetRequiredService>(), Config, Config.Cognite, destination, provider); var handler = provider.GetRequiredService(); diff --git a/config/opc.ua.net.extractor.Config.xml b/config/opc.ua.net.extractor.Config.xml index 507711d3..88dbbc30 100644 --- a/config/opc.ua.net.extractor.Config.xml +++ b/config/opc.ua.net.extractor.Config.xml @@ -12,8 +12,8 @@ - X509Store - CurrentUser\My + Directory + ./certificates CN=Opcua-extractor, C=NO, S=Oslo, O=Cognite, DC=localhost From 7bdfc2d44abfc2788c09ce7e203e855b22c43312 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Tue, 4 Jul 2023 14:54:35 +0200 Subject: [PATCH 05/26] refactor: extract writers --- Extractor/Pushers/CDFPusher.cs | 61 +++++++------- Extractor/Pushers/Writers/AssetsWriter.cs | 12 +-- Extractor/Pushers/Writers/CDFWriter.cs | 25 ++++++ .../Writers/Interfaces/IAssetsWriter.cs | 1 + .../Pushers/Writers/Interfaces/ICDFWriter.cs | 10 +++ .../Pushers/Writers/Interfaces/IRawWriter.cs | 1 + .../Interfaces/IRelationshipsWriter.cs | 12 +-- .../Writers/Interfaces/ITimeseriesWriter.cs | 1 + Extractor/Pushers/Writers/RawWriter.cs | 18 ++--- .../Pushers/Writers/RelationshipsWriter.cs | 81 ++++++++++++++++--- Extractor/Pushers/Writers/TimeseriesWriter.cs | 12 +-- Extractor/Pushers/Writers/WriterUtils.cs | 47 ++++++----- ExtractorLauncher/ExtractorStarter.cs | 1 - Test/Utils/BaseExtractorTestFixture.cs | 2 +- 14 files changed, 197 insertions(+), 87 deletions(-) create mode 100644 Extractor/Pushers/Writers/CDFWriter.cs create mode 100644 Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index 13e40130..0b06ae7d 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -48,9 +48,7 @@ public sealed class CDFPusher : IPusher { private readonly CognitePusherConfig config; private readonly FullConfig fullConfig; - private readonly IRawWriter rawWriter; - private readonly ITimeseriesWriter timeseriesWriter; - private readonly IAssetsWriter assetsWriter; + private readonly ICDFWriter cdfWriter; private readonly IDictionary nodeToAssetIds = new Dictionary(); public bool DataFailing { get; set; } @@ -97,10 +95,7 @@ public CDFPusher( BaseConfig = config; this.destination = destination; this.fullConfig = fullConfig; - // rawWriter = provider.GetRequiredService(); - // timeseriesWriter = provider.GetRequiredService(); - // assetsWriter = provider.GetRequiredService(); - Console.WriteLine("testing"); + cdfWriter = provider.GetRequiredService(); if (config.BrowseCallback != null && (config.BrowseCallback.Id.HasValue || !string.IsNullOrEmpty(config.BrowseCallback.ExternalId))) { callback = new BrowseCallback(destination, 
config.BrowseCallback, log); @@ -372,15 +367,14 @@ await fdmDestination.PushNodes( if (pushCleanAssets && assetsMap.Any()) { - await PushCleanAssets(assetsMap, update.Objects, report, result, token); + await PushCleanAssets(assetsMap, update.Objects, report, result); } isTimeseriesPushed = await PushCleanTimeseries( timeseriesMap, update.Variables, report, - result, - token + result ); var tasks = new List(); @@ -392,7 +386,7 @@ await fdmDestination.PushNodes( if (!pushCleanAssets && assetsMap.Any()) { - tasks.Add(PushRawAssets(assetsMap, update.Objects, report, result, token)); + tasks.Add(PushRawAssets(assetsMap, update.Objects, report, result)); } if (!pushCleanTimeseries) @@ -471,13 +465,12 @@ private async Task PushCleanAssets( ConcurrentDictionary assetsMap, TypeUpdateConfig update, BrowseReport report, - PushResult result, - CancellationToken token + PushResult result ) { try { - await PushCleanAssets(assetsMap, update, report, token); + await cdfWriter.assets.PushNodes(Extractor, assetsMap, nodeToAssetIds, update, report); } catch { @@ -505,13 +498,12 @@ private async Task PushCleanTimeseries( ConcurrentDictionary timeseriesMap, TypeUpdateConfig update, BrowseReport report, - PushResult result, - CancellationToken token + PushResult result ) { try { - await PushCleanTimeseries(timeseriesMap, update, report, token); + await cdfWriter.timeseries.PushVariables(Extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, update, report); } catch { @@ -770,14 +762,11 @@ CancellationToken token if (useRawRelationships) { - await PushRawReferences(relationships, report, token); + await cdfWriter.raw.PushReferences(config.RawMetadata!.Database!, config.RawMetadata!.RelationshipsTable!, relationships, report); } else { - var counts = await Task.WhenAll( - relationships.ChunkBy(1000).Select(chunk => PushReferencesChunk(chunk, token)) - ); - report.RelationshipsCreated += counts.Sum(); + await cdfWriter.relationships.PushReferences(relationships, report); } log.LogInformation("Sucessfully pushed relationships to CDF"); @@ -990,13 +979,21 @@ private async Task PushRawAssets( ConcurrentDictionary assetsMap, TypeUpdateConfig update, BrowseReport report, - PushResult result, - CancellationToken token + PushResult result ) { try { - await PushRawAssets(assetsMap, update, report, token); + if (!pushCleanAssets) { + await cdfWriter.raw.PushNodes( + Extractor, + config.RawMetadata!.Database!, + config.RawMetadata!.AssetsTable!, + assetsMap, + update.AnyUpdate, + report + ); + } } catch (Exception e) { @@ -1279,10 +1276,20 @@ CancellationToken token { try { - await PushRawTimeseries(tsIds, update, report, token); + if (!pushCleanTimeseries) { + await cdfWriter.raw.PushNodes( + Extractor, + config.RawMetadata!.Database!, + config.RawMetadata!.AssetsTable!, + tsIds, + update.AnyUpdate, + report + ); + } } - catch + catch (Exception e) { + log.LogError(e, "Failed to ensure timeseries"); result.Variables = false; } } diff --git a/Extractor/Pushers/Writers/AssetsWriter.cs b/Extractor/Pushers/Writers/AssetsWriter.cs index afd6a05d..4fc6eb8a 100644 --- a/Extractor/Pushers/Writers/AssetsWriter.cs +++ b/Extractor/Pushers/Writers/AssetsWriter.cs @@ -21,38 +21,37 @@ public class AssetsWriter : IAssetsWriter private readonly FullConfig config; private readonly CogniteDestination destination; private readonly CancellationToken token; - private readonly UAExtractor extractor; public AssetsWriter( ILogger logger, CancellationToken token, CogniteDestination destination, - FullConfig config, - UAExtractor 
extractor) + FullConfig config) { this.log = logger; this.config = config; this.destination = destination; this.token = token; - this.extractor = extractor; } public async Task PushNodes( + UAExtractor extractor, ConcurrentDictionary nodes, IDictionary nodeToAssetIds, TypeUpdateConfig update, BrowseReport report ) { - var assets = await CreateAssets(nodes, nodeToAssetIds, report); + var assets = await CreateAssets(extractor, nodes, nodeToAssetIds, report); if (update.AnyUpdate) { - await UpdateAssets(nodes, assets, update, report); + await UpdateAssets(extractor, nodes, assets, update, report); } } private async Task> CreateAssets( + UAExtractor extractor, IDictionary assetMap, IDictionary nodeToAssetIds, BrowseReport report) @@ -91,6 +90,7 @@ private async Task> CreateAssets( } private async Task UpdateAssets( + UAExtractor extractor, IDictionary assetMap, IEnumerable assets, TypeUpdateConfig update, diff --git a/Extractor/Pushers/Writers/CDFWriter.cs b/Extractor/Pushers/Writers/CDFWriter.cs new file mode 100644 index 00000000..19bf8f13 --- /dev/null +++ b/Extractor/Pushers/Writers/CDFWriter.cs @@ -0,0 +1,25 @@ +using Cognite.OpcUa.Pushers.Writers.Interfaces; + +namespace Cognite.OpcUa.Pushers.Writers +{ + public class CDFWriter : ICDFWriter + { + public IRawWriter raw { get; } + public ITimeseriesWriter timeseries { get; } + public IAssetsWriter assets { get; } + public IRelationshipsWriter relationships{ get; } + + public CDFWriter( + IRawWriter rawWriter, + ITimeseriesWriter timeseriesWriter, + IAssetsWriter assetsWriter, + IRelationshipsWriter relationshipsWriter + ) + { + this.raw = rawWriter; + this.timeseries = timeseriesWriter; + this.assets = assetsWriter; + this.relationships = relationshipsWriter; + } + } +} diff --git a/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs b/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs index 6b03a057..af831cf6 100644 --- a/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs @@ -10,6 +10,7 @@ namespace Cognite.OpcUa.Pushers.Writers.Interfaces public interface IAssetsWriter { Task PushNodes( + UAExtractor extractor, ConcurrentDictionary assetMap, IDictionary nodeToAssetIds, TypeUpdateConfig config, diff --git a/Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs b/Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs new file mode 100644 index 00000000..71246c7d --- /dev/null +++ b/Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs @@ -0,0 +1,10 @@ +namespace Cognite.OpcUa.Pushers.Writers.Interfaces +{ + public interface ICDFWriter + { + IRawWriter raw { get; } + ITimeseriesWriter timeseries { get; } + IAssetsWriter assets { get; } + IRelationshipsWriter relationships { get; } + } +} diff --git a/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs index 182b1167..a6ac546c 100644 --- a/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs @@ -19,6 +19,7 @@ Task>>> GetRawRows( ); Task PushNodes( + UAExtractor extractor, string database, string table, ConcurrentDictionary rows, diff --git a/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs b/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs index 22246795..db5777c3 100644 --- a/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs @@ -1,14 +1,14 @@ using System.Collections.Generic; using System.Threading.Tasks; -using 
Cognite.OpcUa.Config; -using Cognite.OpcUa.Types; +using CogniteSdk; -namespace Cognite.OpcUa.Pushers.Destinations.Interfaces +namespace Cognite.OpcUa.Pushers.Writers.Interfaces { public interface IRelationshipsWriter { - FullConfig config { get; } - - Task PushReferences(IEnumerable references, BrowseReport report); + Task PushReferences( + IEnumerable relationships, + BrowseReport report + ); } } diff --git a/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs index 4b859de8..62ba0deb 100644 --- a/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs @@ -10,6 +10,7 @@ namespace Cognite.OpcUa.Pushers.Writers.Interfaces public interface ITimeseriesWriter { Task PushVariables( + UAExtractor extractor, ConcurrentDictionary timeseriesMap, IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, diff --git a/Extractor/Pushers/Writers/RawWriter.cs b/Extractor/Pushers/Writers/RawWriter.cs index 9a549ecb..7d39e32d 100644 --- a/Extractor/Pushers/Writers/RawWriter.cs +++ b/Extractor/Pushers/Writers/RawWriter.cs @@ -22,21 +22,18 @@ public class RawWriter : IRawWriter private CancellationToken token { get; } private FullConfig config { get; } - private UAExtractor Extractor { get; } private CogniteDestination destination { get; } public RawWriter( ILogger log, CancellationToken token, CogniteDestination destination, - FullConfig config, - UAExtractor extractor + FullConfig config ) { this.log = log; this.token = token; this.config = config; - this.Extractor = extractor; this.destination = destination; } @@ -79,6 +76,7 @@ public async Task>>> GetRawRo } public async Task PushNodes( + UAExtractor extractor, string database, string table, ConcurrentDictionary rows, @@ -89,15 +87,16 @@ BrowseReport report { if (shouldUpdate) { - await UpdateRawAssets(database, table, rows, report); + await UpdateRawAssets(extractor, database, table, rows, report); } else { - await CreateRawAssets(database, table, rows, report); + await CreateRawAssets(extractor, database, table, rows, report); } } private async Task UpdateRawAssets( + UAExtractor extractor, string database, string table, IDictionary dataSet, @@ -121,7 +120,7 @@ await UpsertRawRows( kvp.Key, update: PusherUtils.CreateRawUpdate( log, - Extractor.StringConverter, + extractor.StringConverter, kvp.Value, null, ConverterType.Node @@ -154,7 +153,7 @@ T node { var update = PusherUtils.CreateRawUpdate( log, - Extractor.StringConverter, + extractor.StringConverter, node, row, ConverterType.Node @@ -182,6 +181,7 @@ T node } private async Task CreateRawAssets( + UAExtractor extractor, string database, string table, IDictionary assetMap, @@ -205,7 +205,7 @@ await EnsureRawRows( ( pair.Item1.ToJson( log, - Extractor.StringConverter, + extractor.StringConverter, ConverterType.Node ), pair.id diff --git a/Extractor/Pushers/Writers/RelationshipsWriter.cs b/Extractor/Pushers/Writers/RelationshipsWriter.cs index 4641b5fa..654ddc25 100644 --- a/Extractor/Pushers/Writers/RelationshipsWriter.cs +++ b/Extractor/Pushers/Writers/RelationshipsWriter.cs @@ -1,28 +1,87 @@ using System.Collections.Generic; +using System.Linq; using System.Threading; using System.Threading.Tasks; +using Cognite.Extractor.Common; +using Cognite.Extractor.Utils; using Cognite.OpcUa.Config; -using Cognite.OpcUa.Pushers.Destinations.Interfaces; -using Cognite.OpcUa.Types; +using Cognite.OpcUa.Pushers.Writers.Interfaces; +using CogniteSdk; using 
Microsoft.Extensions.Logging; -namespace Cognite.OpcUa.Pushers.Destinations +namespace Cognite.OpcUa.Pushers.Writers { public class RelationshipsWriter : IRelationshipsWriter { - private ILogger _logger; - private FullConfig _config; + private readonly ILogger log; + private FullConfig config; + private readonly CogniteDestination destination; + private readonly CancellationToken token; - public FullConfig config => _config; + public RelationshipsWriter( + ILogger logger, + CancellationToken token, + CogniteDestination destination, + FullConfig config + ) + { + this.log = logger; + this.config = config; + this.destination = destination; + this.token = token; + } - public RelationshipsWriter(ILogger logger, dynamic config, CancellationToken token) { - _logger = logger; - _config = config; + public async Task PushReferences( + IEnumerable relationships, + BrowseReport report + ) + { + var counts = await Task.WhenAll( + relationships.ChunkBy(1000).Select(chunk => PushReferencesChunk(chunk, token)) + ); + report.RelationshipsCreated += counts.Sum(); } - public Task PushReferences(IEnumerable references, BrowseReport report) + private async Task PushReferencesChunk( + IEnumerable relationships, + CancellationToken token + ) { - throw new System.NotImplementedException(); + if (!relationships.Any()) + return 0; + try + { + await destination.CogniteClient.Relationships.CreateAsync(relationships, token); + return relationships.Count(); + } + catch (ResponseException ex) + { + if (ex.Duplicated.Any()) + { + var existing = new HashSet(); + foreach (var dict in ex.Duplicated) + { + if (dict.TryGetValue("externalId", out var value)) + { + if (value is MultiValue.String strValue) + { + existing.Add(strValue.Value); + } + } + } + if (!existing.Any()) + throw; + + relationships = relationships + .Where(rel => !existing.Contains(rel.ExternalId)) + .ToList(); + return await PushReferencesChunk(relationships, token); + } + else + { + throw; + } + } } } } diff --git a/Extractor/Pushers/Writers/TimeseriesWriter.cs b/Extractor/Pushers/Writers/TimeseriesWriter.cs index 5d8ef016..024b91a0 100644 --- a/Extractor/Pushers/Writers/TimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/TimeseriesWriter.cs @@ -22,7 +22,6 @@ public class TimeseriesWriter : ITimeseriesWriter private readonly FullConfig config; private readonly CogniteDestination destination; private readonly CancellationToken token; - private readonly UAExtractor extractor; private bool pushCleanTimeseries => string.IsNullOrWhiteSpace(config.Cognite?.RawMetadata?.Database) && string.IsNullOrWhiteSpace(config.Cognite?.RawMetadata?.TimeseriesTable); @@ -31,18 +30,16 @@ public TimeseriesWriter( ILogger logger, CancellationToken token, CogniteDestination destination, - FullConfig config, - UAExtractor extractor) + FullConfig config) { this.log = logger; this.config = config; this.destination = destination; this.token = token; - this.extractor = extractor; } - public async Task PushVariables( + UAExtractor extractor, ConcurrentDictionary timeseriesMap, IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, @@ -52,6 +49,7 @@ BrowseReport report { var skipMeta = config.Cognite?.SkipMetadata; var timeseries = await CreateTimeseries( + extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, @@ -65,11 +63,12 @@ BrowseReport report if (update.AnyUpdate && toPushMeta.Any() && pushCleanTimeseries) { - await UpdateTimeseries(toPushMeta, timeseries, nodeToAssetIds, update, report); + await UpdateTimeseries(extractor, toPushMeta, timeseries, nodeToAssetIds, 
update, report); } } private async Task> CreateTimeseries( + UAExtractor extractor, IDictionary tsMap, IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, @@ -143,6 +142,7 @@ bool createMinimalTimeseries } private async Task UpdateTimeseries( + UAExtractor extractor, IDictionary tsMap, IEnumerable timeseries, IDictionary nodeToAssetIds, diff --git a/Extractor/Pushers/Writers/WriterUtils.cs b/Extractor/Pushers/Writers/WriterUtils.cs index 5dbf14e0..95a11823 100644 --- a/Extractor/Pushers/Writers/WriterUtils.cs +++ b/Extractor/Pushers/Writers/WriterUtils.cs @@ -11,29 +11,36 @@ public static class WriterUtils { public static void AddWriters(this IServiceCollection services, CancellationToken token) { - services.AddSingleton(provider => + services.AddSingleton(provider => { - var config = provider.GetRequiredService(); - var logger = provider.GetRequiredService>(); - var dest = provider.GetRequiredService(); - var extractor = provider.GetRequiredService(); - return new AssetsWriter(logger, token, dest, config, extractor); - }); - services.AddSingleton(provider => - { - var config = provider.GetRequiredService(); - var logger = provider.GetRequiredService>(); var dest = provider.GetRequiredService(); - var extractor = provider.GetRequiredService(); - return new RawWriter(logger, token, dest, config, extractor); - }); - services.AddSingleton(provider => - { var config = provider.GetRequiredService(); - var logger = provider.GetRequiredService>(); - var dest = provider.GetRequiredService(); - var extractor = provider.GetRequiredService(); - return new TimeseriesWriter(logger, token, dest, config, extractor); + return new CDFWriter( + new RawWriter( + provider.GetRequiredService>(), + token, + dest, + config + ), + new TimeseriesWriter( + provider.GetRequiredService>(), + token, + dest, + config + ), + new AssetsWriter( + provider.GetRequiredService>(), + token, + dest, + config + ), + new RelationshipsWriter( + provider.GetRequiredService>(), + token, + dest, + config + ) + ); }); } } diff --git a/ExtractorLauncher/ExtractorStarter.cs b/ExtractorLauncher/ExtractorStarter.cs index e1492f6d..6518869d 100644 --- a/ExtractorLauncher/ExtractorStarter.cs +++ b/ExtractorLauncher/ExtractorStarter.cs @@ -22,7 +22,6 @@ You should have received a copy of the GNU General Public License using Cognite.OpcUa.Config; using Cognite.OpcUa.Pushers; using Cognite.OpcUa.Pushers.Writers; -using Cognite.OpcUa.Pushers.Writers.Interfaces; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Prometheus; diff --git a/Test/Utils/BaseExtractorTestFixture.cs b/Test/Utils/BaseExtractorTestFixture.cs index f776ff1e..7efaebf0 100644 --- a/Test/Utils/BaseExtractorTestFixture.cs +++ b/Test/Utils/BaseExtractorTestFixture.cs @@ -165,9 +165,9 @@ public async Task ClearLiteDB(InfluxDBClient client) { CommonTestUtils.AddDummyProvider("test", CDFMockHandler.MockMode.None, true, Services); Services.AddCogniteClient("appid", null, true, true, false); + Services.AddWriters(Source.Token); var provider = Services.BuildServiceProvider(); var destination = provider.GetRequiredService(); - Services.AddWriters(Source.Token); var pusher = new CDFPusher(Provider.GetRequiredService>(), Config, Config.Cognite, destination, provider); var handler = provider.GetRequiredService(); From 43b872e181e9f1083103c11cfcd5a96fa845ee57 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Wed, 5 Jul 2023 11:35:50 +0200 Subject: [PATCH 06/26] test: fix failing tests --- Extractor/Pushers/CDFPusher.cs | 79 ++++++------ 
Extractor/Pushers/Writers/Dtos/Result.cs | 8 ++ .../Pushers/Writers/Interfaces/IRawWriter.cs | 16 +-- .../Writers/Interfaces/ITimeseriesWriter.cs | 9 +- Extractor/Pushers/Writers/RawWriter.cs | 120 +++++------------- Extractor/Pushers/Writers/TimeseriesWriter.cs | 24 ++-- Test/Integration/NodeExtractionTests.cs | 6 +- 7 files changed, 109 insertions(+), 153 deletions(-) create mode 100644 Extractor/Pushers/Writers/Dtos/Result.cs diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index 0b06ae7d..f857886c 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -389,9 +389,9 @@ await fdmDestination.PushNodes( tasks.Add(PushRawAssets(assetsMap, update.Objects, report, result)); } - if (!pushCleanTimeseries) + if (!pushCleanTimeseries && timeseriesMap.Any()) { - tasks.Add(PushRawTimeseries(timeseriesMap, update.Variables, report, result, token)); + tasks.Add(PushRawTimeseries(timeseriesMap, update.Variables, report, result)); } tasks.Add(PushReferences(references, report, result, token)); @@ -426,13 +426,7 @@ CancellationToken token bool pushResult = true; try { - pushResult = await fdmDestination!.PushNodes( - objects, - variables, - references, - Extractor, - token - ); + pushResult = await fdmDestination!.PushNodes(objects, variables, references, Extractor, token); } catch { @@ -503,7 +497,17 @@ PushResult result { try { - await cdfWriter.timeseries.PushVariables(Extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, update, report); + var _result = await cdfWriter.timeseries.PushVariables(Extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, update); + var skipMetadata = config.SkipMetadata; + var createMinimal = !pushCleanTimeseries || skipMetadata; + if (createMinimal) + { + report.MinimalTimeSeriesCreated += _result.Created; + } + else + { + report.TimeSeriesCreated += _result.Created; + } } catch { @@ -762,7 +766,8 @@ CancellationToken token if (useRawRelationships) { - await cdfWriter.raw.PushReferences(config.RawMetadata!.Database!, config.RawMetadata!.RelationshipsTable!, relationships, report); + var _result = await cdfWriter.raw.PushReferences(config.RawMetadata!.Database!, config.RawMetadata!.RelationshipsTable!, relationships); + report.RelationshipsCreated += _result.Created; } else { @@ -984,16 +989,16 @@ PushResult result { try { - if (!pushCleanAssets) { - await cdfWriter.raw.PushNodes( - Extractor, - config.RawMetadata!.Database!, - config.RawMetadata!.AssetsTable!, - assetsMap, - update.AnyUpdate, - report - ); - } + var _result = await cdfWriter.raw.PushNodes( + Extractor, + config.RawMetadata!.Database!, + config.RawMetadata!.AssetsTable!, + assetsMap, + ConverterType.Node, + update.AnyUpdate + ); + report.AssetsCreated += _result.Created; + report.AssetsUpdated += _result.Updated; } catch (Exception e) { @@ -1266,26 +1271,24 @@ private async Task UpdateTimeseries( /// /// Timeseries to push /// Configuration for which fields, if any, to update in CDF - private async Task PushRawTimeseries( - ConcurrentDictionary tsIds, - TypeUpdateConfig update, - BrowseReport report, - PushResult result, - CancellationToken token - ) + private async Task PushRawTimeseries(ConcurrentDictionary tsIds, TypeUpdateConfig update, BrowseReport report, PushResult result) { try { - if (!pushCleanTimeseries) { - await cdfWriter.raw.PushNodes( - Extractor, - config.RawMetadata!.Database!, - config.RawMetadata!.AssetsTable!, - tsIds, - update.AnyUpdate, - report - ); - } + var toPushMeta = tsIds + .Where(kvp => 
kvp.Value.Source != NodeSource.CDF) + .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); + + var _result = await cdfWriter.raw.PushNodes( + Extractor, + config.RawMetadata!.Database!, + config.RawMetadata!.TimeseriesTable!, + toPushMeta, + ConverterType.Variable, + update.AnyUpdate && !config.SkipMetadata + ); + report.TimeSeriesCreated += _result.Created; + report.TimeSeriesUpdated += _result.Updated; } catch (Exception e) { diff --git a/Extractor/Pushers/Writers/Dtos/Result.cs b/Extractor/Pushers/Writers/Dtos/Result.cs new file mode 100644 index 00000000..51ad8a14 --- /dev/null +++ b/Extractor/Pushers/Writers/Dtos/Result.cs @@ -0,0 +1,8 @@ +namespace Cognite.OpcUa.Pushers.Writers.Dtos +{ + public class Result + { + public int Created { get; set; } + public int Updated { get; set; } + } +} diff --git a/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs index a6ac546c..f39783b4 100644 --- a/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs @@ -1,8 +1,9 @@ -using System.Collections.Concurrent; using System.Collections.Generic; using System.Text.Json; using System.Threading.Tasks; using Cognite.OpcUa.Nodes; +using Cognite.OpcUa.Pushers.Writers.Dtos; +using Cognite.OpcUa.Types; using CogniteSdk; namespace Cognite.OpcUa.Pushers.Writers.Interfaces @@ -18,21 +19,20 @@ Task>>> GetRawRows( IEnumerable? columns ); - Task PushNodes( + Task PushNodes( UAExtractor extractor, string database, string table, - ConcurrentDictionary rows, - bool shouldUpdate, - BrowseReport report + IDictionary rows, + ConverterType converter, + bool shouldUpdate ) where T : BaseUANode; - Task PushReferences( + Task PushReferences( string database, string table, - IEnumerable relationships, - BrowseReport report + IEnumerable relationships ); } } diff --git a/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs index 62ba0deb..177b130a 100644 --- a/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs @@ -3,20 +3,19 @@ using System.Threading.Tasks; using Cognite.OpcUa.Config; using Cognite.OpcUa.Nodes; +using Cognite.OpcUa.Pushers.Writers.Dtos; using Opc.Ua; namespace Cognite.OpcUa.Pushers.Writers.Interfaces { public interface ITimeseriesWriter { - Task PushVariables( + Task PushVariables( UAExtractor extractor, ConcurrentDictionary timeseriesMap, IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, - TypeUpdateConfig update, - BrowseReport report - ) -; + TypeUpdateConfig update + ); } } diff --git a/Extractor/Pushers/Writers/RawWriter.cs b/Extractor/Pushers/Writers/RawWriter.cs index 7d39e32d..9344b854 100644 --- a/Extractor/Pushers/Writers/RawWriter.cs +++ b/Extractor/Pushers/Writers/RawWriter.cs @@ -1,5 +1,4 @@ using System; -using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Text.Json; @@ -8,6 +7,7 @@ using Cognite.Extractor.Utils; using Cognite.OpcUa.Config; using Cognite.OpcUa.Nodes; +using Cognite.OpcUa.Pushers.Writers.Dtos; using Cognite.OpcUa.Pushers.Writers.Interfaces; using Cognite.OpcUa.Types; using CogniteSdk; @@ -75,38 +75,24 @@ public async Task>>> GetRawRo return rows; } - public async Task PushNodes( - UAExtractor extractor, - string database, - string table, - ConcurrentDictionary rows, - bool shouldUpdate, - BrowseReport report - ) - where T : BaseUANode + public async Task PushNodes(UAExtractor extractor, 
string database, string table, IDictionary rows, ConverterType converter, bool shouldUpdate) where T : BaseUANode { + var result = new Result { Created = 0, Updated = 0 }; + if (shouldUpdate) { - await UpdateRawAssets(extractor, database, table, rows, report); + await Update(extractor, database, table, rows, converter, result); } else { - await CreateRawAssets(extractor, database, table, rows, report); + await Create(extractor, database, table, rows, converter, result); } + return result; } - private async Task UpdateRawAssets( - UAExtractor extractor, - string database, - string table, - IDictionary dataSet, - BrowseReport report - ) - where T : BaseUANode + private async Task Update(UAExtractor extractor, string database, string table, IDictionary dataSet, ConverterType converter, Result result) where T : BaseUANode { - if (database == null || table == null) - return; - await UpsertRawRows( + await UpsertRows( database, table, rows => @@ -116,33 +102,20 @@ await UpsertRawRows( return dataSet .Select( kvp => - ( - kvp.Key, - update: PusherUtils.CreateRawUpdate( - log, - extractor.StringConverter, - kvp.Value, - null, - ConverterType.Node - ) - ) + (kvp.Key, update: PusherUtils.CreateRawUpdate(log, extractor.StringConverter, kvp.Value, null, converter)) ) .Where(elem => elem.update != null) .ToDictionary(pair => pair.Key, pair => pair.update!.Value); } var toWrite = - new List<( - string key, - RawRow> row, - T node - )>(); + new List<(string key, RawRow> row, T node)>(); foreach (var row in rows) { - if (dataSet.TryGetValue(row.Key, out var ts)) + if (dataSet.TryGetValue(row.Key, out var node)) { - toWrite.Add((row.Key, row, ts)); + toWrite.Add((row.Key, row, node)); dataSet.Remove(row.Key); } } @@ -151,24 +124,18 @@ T node foreach (var (key, row, node) in toWrite) { - var update = PusherUtils.CreateRawUpdate( - log, - extractor.StringConverter, - node, - row, - ConverterType.Node - ); + var update = PusherUtils.CreateRawUpdate(log, extractor.StringConverter, node, row, converter); if (update != null) { updates[key] = update.Value; if (row == null) { - report.AssetsCreated++; + result.Created++; } else { - report.AssetsUpdated++; + result.Updated++; } } } @@ -180,47 +147,27 @@ T node ); } - private async Task CreateRawAssets( - UAExtractor extractor, - string database, - string table, - IDictionary assetMap, - BrowseReport report - ) - where T : BaseUANode + private async Task Create(UAExtractor extractor, string database, string table, IDictionary dataMap, ConverterType converter, Result result) where T : BaseUANode { - if (database == null || table == null) - return; - - await EnsureRawRows( + await EnsureRows( database, table, - assetMap.Keys, + dataMap.Keys, ids => { - var assets = ids.Select(id => (assetMap[id], id)); - var creates = assets - .Select( - pair => - ( - pair.Item1.ToJson( - log, - extractor.StringConverter, - ConverterType.Node - ), - pair.id - ) - ) + var rows = ids.Select(id => (dataMap[id], id)); + var creates = rows + .Select(pair => (pair.Item1.ToJson(log, extractor.StringConverter, converter), pair.id)) .Where(pair => pair.Item1 != null) .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); - report.AssetsCreated += creates.Count; + result.Created += creates.Count; return creates; }, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase } ); } - private async Task UpsertRawRows( + private async Task UpsertRows( string dbName, string tableName, Func< @@ -269,13 +216,7 @@ async Task CallAndCreate(IEnumerable>>? 
r log.LogInformation("Updated or created {Count} rows in CDF Raw", count); } - private async Task EnsureRawRows( - string dbName, - string tableName, - IEnumerable keys, - Func, IDictionary> dtoBuilder, - JsonSerializerOptions options - ) + private async Task EnsureRows(string dbName, string tableName, IEnumerable keys, Func, IDictionary> dtoBuilder, JsonSerializerOptions options) { var rows = await GetRawRows(dbName, tableName, new[] { "," }); var existing = rows.Select(row => row.Key); @@ -290,14 +231,14 @@ JsonSerializerOptions options await destination.InsertRawRowsAsync(dbName, tableName, createDtos, options, token); } - public async Task PushReferences( + public async Task PushReferences( string database, string table, - IEnumerable relationships, - BrowseReport report + IEnumerable relationships ) { - await EnsureRawRows( + var result = new Result { Created = 0, Updated = 0 }; + await EnsureRows( database, table, relationships.Select(rel => rel.ExternalId), @@ -307,11 +248,12 @@ await EnsureRawRows( var creates = relationships .Where(rel => idSet.Contains(rel.ExternalId)) .ToDictionary(rel => rel.ExternalId); - report.RelationshipsCreated += creates.Count; + result.Created += creates.Count; return creates; }, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase } ); + return result; } } } diff --git a/Extractor/Pushers/Writers/TimeseriesWriter.cs b/Extractor/Pushers/Writers/TimeseriesWriter.cs index 024b91a0..a96b16ac 100644 --- a/Extractor/Pushers/Writers/TimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/TimeseriesWriter.cs @@ -9,6 +9,7 @@ using Cognite.OpcUa.Config; using Cognite.OpcUa.Nodes; using Cognite.OpcUa.NodeSources; +using Cognite.OpcUa.Pushers.Writers.Dtos; using Cognite.OpcUa.Pushers.Writers.Interfaces; using CogniteSdk; using Microsoft.Extensions.Logging; @@ -38,23 +39,23 @@ public TimeseriesWriter( this.token = token; } - public async Task PushVariables( + public async Task PushVariables( UAExtractor extractor, ConcurrentDictionary timeseriesMap, IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, - TypeUpdateConfig update, - BrowseReport report + TypeUpdateConfig update ) { + var result = new Result { Created = 0, Updated = 0 }; var skipMeta = config.Cognite?.SkipMetadata; var timeseries = await CreateTimeseries( extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, - report, - !pushCleanTimeseries && skipMeta.HasValue ? skipMeta.Value : false + result, + !pushCleanTimeseries || (skipMeta.HasValue ? 
skipMeta.Value : false) ); var toPushMeta = timeseriesMap @@ -63,8 +64,9 @@ BrowseReport report if (update.AnyUpdate && toPushMeta.Any() && pushCleanTimeseries) { - await UpdateTimeseries(extractor, toPushMeta, timeseries, nodeToAssetIds, update, report); + await UpdateTimeseries(extractor, toPushMeta, timeseries, nodeToAssetIds, update, result); } + return result; } private async Task> CreateTimeseries( @@ -72,7 +74,7 @@ private async Task> CreateTimeseries( IDictionary tsMap, IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, - BrowseReport report, + Result result, bool createMinimalTimeseries ) { @@ -96,11 +98,11 @@ bool createMinimalTimeseries .Where(ts => ts != null); if (createMinimalTimeseries) { - report.MinimalTimeSeriesCreated += creates.Count(); + result.Created += creates.Count(); } else { - report.TimeSeriesCreated += creates.Count(); + result.Created += creates.Count(); } return creates; }, @@ -147,7 +149,7 @@ private async Task UpdateTimeseries( IEnumerable timeseries, IDictionary nodeToAssetIds, TypeUpdateConfig update, - BrowseReport report) + Result result) { var updates = new List(); var existing = timeseries.ToDictionary(asset => asset.ExternalId); @@ -172,7 +174,7 @@ private async Task UpdateTimeseries( log.LogResult(res, RequestType.UpdateTimeSeries, false); res.ThrowOnFatal(); - report.TimeSeriesUpdated += res.Results?.Count() ?? 0; + result.Updated += res.Results?.Count() ?? 0; } } } diff --git a/Test/Integration/NodeExtractionTests.cs b/Test/Integration/NodeExtractionTests.cs index 69e55bf6..1c30ad35 100644 --- a/Test/Integration/NodeExtractionTests.cs +++ b/Test/Integration/NodeExtractionTests.cs @@ -28,6 +28,8 @@ public NodeExtractionTestFixture() : base() public class NodeExtractionTests : IClassFixture { private readonly NodeExtractionTestFixture tester; + private readonly ITestOutputHelper _output; + public NodeExtractionTests(ITestOutputHelper output, NodeExtractionTestFixture tester) { this.tester = tester ?? 
throw new ArgumentNullException(nameof(tester)); @@ -35,6 +37,7 @@ public NodeExtractionTests(ITestOutputHelper output, NodeExtractionTestFixture t tester.ResetConfig(); tester.Config.History.Enabled = false; tester.Client.TypeManager.Reset(); + _output = output; } #region datatypeconfig [Fact] @@ -906,8 +909,7 @@ public async Task TestUpdateFields( [InlineData(true, false)] [InlineData(false, true)] [InlineData(true, true)] - public async Task TestUpdateFieldsRaw( - bool assets, bool timeseries) + public async Task TestUpdateFieldsRaw(bool assets, bool timeseries) { var (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); From ecc6e87a088e132b57a74fd3d60ac7790c1f247d Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Wed, 5 Jul 2023 11:36:47 +0200 Subject: [PATCH 07/26] test: fix failing test --- Extractor/Pushers/CDFPusher.cs | 667 +----------------- .../Pushers/Writers/Interfaces/IRawWriter.cs | 4 +- .../Interfaces/IRelationshipsWriter.cs | 7 +- Extractor/Pushers/Writers/RawWriter.cs | 3 +- .../Pushers/Writers/RelationshipsWriter.cs | 20 +- Extractor/Pushers/Writers/TimeseriesWriter.cs | 8 +- Extractor/Pushers/Writers/WriterUtils.cs | 1 - 7 files changed, 38 insertions(+), 672 deletions(-) diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index f857886c..ef4fc7e9 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -80,6 +80,9 @@ public sealed class CDFPusher : IPusher private bool pushCleanTimeseries => string.IsNullOrWhiteSpace(config.RawMetadata?.Database) && string.IsNullOrWhiteSpace(config.RawMetadata?.TimeseriesTable); + private bool pushCleanReferences => + string.IsNullOrWhiteSpace(config.RawMetadata?.Database) + && string.IsNullOrWhiteSpace(config.RawMetadata?.RelationshipsTable); public CDFPusher( @@ -439,11 +442,13 @@ CancellationToken token private ConcurrentDictionary MapAssets(IEnumerable objects) { - return config.SkipMetadata ? new ConcurrentDictionary() : new ConcurrentDictionary( - objects - .Where(node => node.Source != NodeSource.CDF) - .ToDictionary(obj => Extractor.GetUniqueId(obj.Id)!) - ); + return config.SkipMetadata ? + new ConcurrentDictionary() : + new ConcurrentDictionary( + objects + .Where(node => node.Source != NodeSource.CDF) + .ToDictionary(obj => Extractor.GetUniqueId(obj.Id)!) 
+ ); } private ConcurrentDictionary MapTimeseries( @@ -473,21 +478,6 @@ PushResult result return result.Objects; } - private async Task PushCleanAssets( - ConcurrentDictionary assetsMap, - TypeUpdateConfig update, - BrowseReport report, - CancellationToken token - ) - { - var assets = await CreateAssets(assetsMap, report, token); - - if (update.AnyUpdate) - { - await UpdateAssets(assetsMap, assets, update, report, token); - } - } - private async Task PushCleanTimeseries( ConcurrentDictionary timeseriesMap, TypeUpdateConfig update, @@ -508,6 +498,7 @@ PushResult result { report.TimeSeriesCreated += _result.Created; } + report.TimeSeriesUpdated += _result.Updated; } catch { @@ -517,30 +508,6 @@ PushResult result return result.Variables; } - private async Task PushCleanTimeseries( - ConcurrentDictionary timeseriesMap, - TypeUpdateConfig update, - BrowseReport report, - CancellationToken token - ) - { - var timeseries = await CreateTimeseries( - timeseriesMap, - report, - !pushCleanTimeseries || config.SkipMetadata, - token - ); - - var toPushMeta = timeseriesMap - .Where(kvp => kvp.Value.Source != NodeSource.CDF) - .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); - - if (update.AnyUpdate && toPushMeta.Any() && pushCleanTimeseries) - { - await UpdateTimeseries(toPushMeta, timeseries, update, report, token); - } - } - /// /// Reset the pusher, preparing it to be restarted /// @@ -735,46 +702,23 @@ CancellationToken token { try { - await PushReferences(references, report, token); - } - catch (Exception e) - { - log.LogError(e, "Failed to ensure references"); - result.References = false; - } - } + if (references == null || !references.Any()) + return; - private async Task PushReferences( - IEnumerable references, - BrowseReport report, - CancellationToken token - ) - { - if (references == null || !references.Any()) - return; - - var relationships = references - .Select(reference => reference.ToRelationship(config.DataSet?.Id, Extractor)) - .DistinctBy(rel => rel.ExternalId); + var relationships = references + .Select(reference => reference.ToRelationship(config.DataSet?.Id, Extractor)) + .DistinctBy(rel => rel.ExternalId); - bool useRawRelationships = - config.RawMetadata != null - && !string.IsNullOrWhiteSpace(config.RawMetadata.Database) - && !string.IsNullOrWhiteSpace(config.RawMetadata.RelationshipsTable); - - log.LogInformation("Test {Count} relationships against CDF", references.Count()); - - if (useRawRelationships) - { - var _result = await cdfWriter.raw.PushReferences(config.RawMetadata!.Database!, config.RawMetadata!.RelationshipsTable!, relationships); + var _result = pushCleanReferences ? + await cdfWriter.relationships.PushReferences(relationships, token) : + await cdfWriter.raw.PushReferences(config.RawMetadata!.Database!, config.RawMetadata!.RelationshipsTable!, relationships, token); report.RelationshipsCreated += _result.Created; } - else + catch (Exception e) { - await cdfWriter.relationships.PushReferences(relationships, report); + log.LogError(e, "Failed to ensure references"); + result.References = false; } - - log.LogInformation("Sucessfully pushed relationships to CDF"); } public async Task ExecuteDeletes(DeletedNodes deletes, CancellationToken token) @@ -810,171 +754,6 @@ public async Task ExecuteDeletes(DeletedNodes deletes, CancellationToken t #endregion #region assets - /// - /// Update list of nodes as assets in CDF Raw. - /// - /// Id, node map for the assets that should be pushed. 
- private async Task UpdateRawAssets(IDictionary assetMap, BrowseReport report, CancellationToken token) - { - if (config.RawMetadata?.Database == null || config.RawMetadata?.AssetsTable == null) return; - await UpsertRawRows(config.RawMetadata.Database, config.RawMetadata.AssetsTable, rows => - { - if (rows == null) - { - return assetMap.Select(kvp => ( - kvp.Key, - update: PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, kvp.Value, null, ConverterType.Node) - )).Where(elem => elem.update != null) - .ToDictionary(pair => pair.Key, pair => pair.update!.Value); - } - - var toWrite = new List<(string key, RawRow> row, BaseUANode node)>(); - - foreach (var row in rows) - { - if (assetMap.TryGetValue(row.Key, out var ts)) - { - toWrite.Add((row.Key, row, ts)); - assetMap.Remove(row.Key); - } - } - - var updates = new Dictionary(); - - foreach (var (key, row, node) in toWrite) - { - var update = PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, node, row, ConverterType.Node); - - if (update != null) - { - updates[key] = update.Value; - if (row == null) - { - report.AssetsCreated++; - } - else - { - report.AssetsUpdated++; - } - } - } - - return updates; - }, null, token); - } - - /// - /// Create list of nodes as assets in CDF Raw. - /// This does not create rows if they already exist. - /// - /// Id, node map for the assets that should be pushed. - private async Task CreateRawAssets(IDictionary assetMap, BrowseReport report, CancellationToken token) - { - if (config.RawMetadata?.Database == null || config.RawMetadata?.AssetsTable == null) return; - - await EnsureRawRows(config.RawMetadata.Database, config.RawMetadata.AssetsTable, assetMap.Keys, ids => - { - var assets = ids.Select(id => (assetMap[id], id)); - var creates = assets.Select(pair => (pair.Item1.ToJson(log, Extractor.StringConverter, ConverterType.Node), pair.id)) - .Where(pair => pair.Item1 != null) - .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); - report.AssetsCreated += creates.Count; - return creates; - }, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, token); - } - - /// - /// Create assets in CDF Clean. - /// - /// Id, node map for the assets that should be pushed. - private async Task> CreateAssets(IDictionary assetMap, BrowseReport report, CancellationToken token) - { - var assets = new List(); - foreach (var chunk in Chunking.ChunkByHierarchy(assetMap.Values, config.CdfChunking.Assets, node => node.Id, node => node.ParentId)) - { - var assetChunk = await destination.GetOrCreateAssetsAsync(chunk.Select(node => Extractor.GetUniqueId(node.Id)!), ids => - { - var assets = ids.Select(id => assetMap[id]); - var creates = assets - .Select(node => node.ToCDFAsset(fullConfig, Extractor, config.DataSet?.Id, config.MetadataMapping?.Assets)) - .Where(asset => asset != null); - report.AssetsCreated += creates.Count(); - return creates; - }, RetryMode.None, SanitationMode.Clean, token); - - log.LogResult(assetChunk, RequestType.CreateAssets, true); - - assetChunk.ThrowOnFatal(); - - if (assetChunk.Results == null) continue; - - foreach (var asset in assetChunk.Results) - { - nodeToAssetIds[assetMap[asset.ExternalId].Id] = asset.Id; - } - assets.AddRange(assetChunk.Results); - } - return assets; - } - - /// - /// Update assets in CDF Clean. - /// - /// Id, node map for the assets that should be pushed. - /// List of existing assets in CDF. - /// Configuration for which fields should be updated. 
- private async Task UpdateAssets( - IDictionary assetMap, - IEnumerable assets, - TypeUpdateConfig update, - BrowseReport report, - CancellationToken token - ) - { - var updates = new List(); - var existing = assets.ToDictionary(asset => asset.ExternalId); - foreach (var kvp in assetMap) - { - if (existing.TryGetValue(kvp.Key, out var asset)) - { - var assetUpdate = PusherUtils.GetAssetUpdate( - fullConfig, - asset, - kvp.Value, - Extractor, - update - ); - - if (assetUpdate == null) - continue; - if ( - assetUpdate.ParentExternalId != null - || assetUpdate.Description != null - || assetUpdate.Name != null - || assetUpdate.Metadata != null - ) - { - updates.Add(new AssetUpdateItem(asset.ExternalId) { Update = assetUpdate }); - } - } - } - if (updates.Any()) - { - var res = await destination.UpdateAssetsAsync( - updates, - RetryMode.OnError, - SanitationMode.Clean, - token - ); - - log.LogResult(res, RequestType.UpdateAssets, false); - - res.ThrowOnFatal(); - - report.AssetsUpdated += res.Results?.Count() ?? 0; - } - } - /// /// Master method for pushing assets to CDF raw. /// @@ -1007,23 +786,6 @@ PushResult result } } - private async Task PushRawAssets( - ConcurrentDictionary assetsMap, - TypeUpdateConfig update, - BrowseReport report, - CancellationToken token - ) - { - if (update.AnyUpdate) - { - await UpdateRawAssets(assetsMap, report, token); - } - else - { - await CreateRawAssets(assetsMap, report, token); - } - } - private async Task MarkAssetsAsDeleted( IEnumerable externalIds, CancellationToken token @@ -1075,197 +837,6 @@ await MarkRawRowsAsDeleted( #endregion #region timeseries - /// - /// Update list of nodes as timeseries in CDF Raw. - /// - /// Id, node map for the timeseries that should be pushed. - private async Task UpdateRawTimeseries(IDictionary tsMap, BrowseReport report, CancellationToken token) - { - if (config.RawMetadata?.Database == null || config.RawMetadata.TimeseriesTable == null) return; - await UpsertRawRows(config.RawMetadata.Database, config.RawMetadata.TimeseriesTable, rows => - { - if (rows == null) - { - return tsMap.Select(kvp => ( - kvp.Key, - update: PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, kvp.Value, null, ConverterType.Variable) - )).Where(elem => elem.update != null) - .ToDictionary(pair => pair.Key, pair => pair.update!.Value); - } - - var toWrite = new List<(string key, RawRow> row, UAVariable node)>(); - - foreach (var row in rows) - { - if (tsMap.TryGetValue(row.Key, out var ts)) - { - toWrite.Add((row.Key, row, ts)); - tsMap.Remove(row.Key); - } - } - - var updates = new Dictionary(); - - foreach (var (key, row, node) in toWrite) - { - var update = PusherUtils.CreateRawUpdate(log, Extractor.StringConverter, node, row, ConverterType.Variable); - - if (update != null) - { - updates[key] = update.Value; - if (row == null) - { - report.TimeSeriesCreated++; - } - else - { - report.TimeSeriesUpdated++; - } - } - } - - return updates; - }, null, token); - } - - /// - /// Create list of nodes as timeseries in CDF Raw. - /// This does not create rows if they already exist. - /// - /// Id, node map for the timeseries that should be pushed. 
- private async Task CreateRawTimeseries(IDictionary tsMap, BrowseReport report, CancellationToken token) - { - if (config.RawMetadata?.Database == null || config.RawMetadata.TimeseriesTable == null) return; - - await EnsureRawRows(config.RawMetadata.Database, config.RawMetadata.TimeseriesTable, tsMap.Keys, ids => - { - var timeseries = ids.Select(id => (tsMap[id], id)); - var creates = timeseries.Select(pair => (pair.Item1.ToJson(log, Extractor.StringConverter, ConverterType.Variable), pair.id)) - .Where(pair => pair.Item1 != null) - .ToDictionary(pair => pair.id, pair => pair.Item1!.RootElement); - - report.TimeSeriesCreated += creates.Count; - return creates; - }, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, token); - } - - /// - /// Create timeseries in CDF Clean, optionally creates only minimal timeseries with no metadata or context. - /// - /// Id, node map for the timeseries that should be pushed. - /// True to create timeseries with no metadata. - private async Task> CreateTimeseries( - IDictionary tsMap, - BrowseReport report, - bool createMinimalTimeseries, - CancellationToken token - ) - { - var timeseries = await destination.GetOrCreateTimeSeriesAsync( - tsMap.Keys, - ids => - { - var tss = ids.Select(id => tsMap[id]); - var creates = tss.Select( - ts => - ts.ToTimeseries( - fullConfig, - Extractor, - Extractor, - config.DataSet?.Id, - nodeToAssetIds, - config.MetadataMapping?.Timeseries, - createMinimalTimeseries - ) - ) - .Where(ts => ts != null); - if (createMinimalTimeseries) - { - report.MinimalTimeSeriesCreated += creates.Count(); - } - else - { - report.TimeSeriesCreated += creates.Count(); - } - return creates; - }, - RetryMode.None, - SanitationMode.Clean, - token - ); - - log.LogResult(timeseries, RequestType.CreateTimeSeries, true); - - timeseries.ThrowOnFatal(); - - if (timeseries.Results == null) - return Array.Empty(); - - var foundBadTimeseries = new List(); - foreach (var ts in timeseries.Results) - { - var loc = tsMap[ts.ExternalId]; - if (nodeToAssetIds.TryGetValue(loc.ParentId, out var parentId)) - { - nodeToAssetIds[loc.Id] = parentId; - } - if (ts.IsString != loc.FullAttributes.DataType.IsString) - { - mismatchedTimeseries.Add(ts.ExternalId); - foundBadTimeseries.Add(ts.ExternalId); - } - } - if (foundBadTimeseries.Any()) - { - log.LogDebug( - "Found mismatched timeseries when ensuring: {TimeSeries}", - string.Join(", ", foundBadTimeseries) - ); - } - - return timeseries.Results; - } - - /// - /// Update timeseries in CDF Clean. - /// - /// Id, node map for the timeseries that should be pushed. - /// List of existing timeseries in CDF. - /// Configuration for which fields should be updated. 
- private async Task UpdateTimeseries( - IDictionary tsMap, - IEnumerable timeseries, - TypeUpdateConfig update, - BrowseReport report, - CancellationToken token) - { - var updates = new List(); - var existing = timeseries.ToDictionary(asset => asset.ExternalId); - foreach (var kvp in tsMap) - { - if (existing.TryGetValue(kvp.Key, out var ts)) - { - var tsUpdate = PusherUtils.GetTSUpdate(fullConfig, Extractor, ts, kvp.Value, update, nodeToAssetIds); - if (tsUpdate == null) continue; - if (tsUpdate.AssetId != null || tsUpdate.Description != null - || tsUpdate.Name != null || tsUpdate.Metadata != null) - { - updates.Add(new TimeSeriesUpdateItem(ts.ExternalId) { Update = tsUpdate }); - } - } - } - - if (updates.Any()) - { - var res = await destination.UpdateTimeSeriesAsync(updates, RetryMode.OnError, SanitationMode.Clean, token); - - log.LogResult(res, RequestType.UpdateTimeSeries, false); - res.ThrowOnFatal(); - - report.TimeSeriesUpdated += res.Results?.Count() ?? 0; - } - } - /// /// Master method for pushing timeseries to CDF raw or clean. /// @@ -1297,27 +868,6 @@ private async Task PushRawTimeseries(ConcurrentDictionary ts } } - private async Task PushRawTimeseries( - ConcurrentDictionary tsIds, - TypeUpdateConfig update, - BrowseReport report, - CancellationToken token - ) - { - var toPushMeta = tsIds - .Where(kvp => kvp.Value.Source != NodeSource.CDF) - .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); - - if (update.AnyUpdate && !config.SkipMetadata) - { - await UpdateRawTimeseries(toPushMeta, report, token); - } - else - { - await CreateRawTimeseries(toPushMeta, report, token); - } - } - private async Task MarkTimeSeriesAsDeleted( IEnumerable externalIds, CancellationToken token @@ -1366,98 +916,6 @@ await MarkRawRowsAsDeleted( #endregion #region raw-utils - /// - /// Ensure that raw rows given by exist in the table given by - /// and . - /// Keys that do not exist are built into DTOs by . - /// - /// Type of DTO to build - /// Name of database in CDF Raw - /// Name of table in CDF Raw - /// Keys of rows to ensure - /// Method to build DTOs for keys that were not found. - /// used for serialization. - private async Task EnsureRawRows( - string dbName, - string tableName, - IEnumerable keys, - Func, IDictionary> dtoBuilder, - JsonSerializerOptions options, - CancellationToken token - ) - { - var rows = await GetRawRows(dbName, tableName, new[] { "," }, token); - var existing = rows.Select(row => row.Key); - - var toCreate = keys.Except(existing); - if (!toCreate.Any()) - return; - log.LogInformation("Creating {Count} raw rows in CDF", toCreate.Count()); - - var createDtos = dtoBuilder(toCreate); - - await destination.InsertRawRowsAsync(dbName, tableName, createDtos, options, token); - } - - /// - /// Insert or update raw rows given by in table - /// given by and . - /// The dtoBuilder is called with chunks of 10000 rows, and finally with null to indicate that there are no more rows. - /// - /// Type of DTO to build - /// Name of database in CDF Raw - /// Name of table in CDF Raw - /// Method to build DTOs, called with existing rows. - /// used for serialization. - private async Task UpsertRawRows( - string dbName, - string tableName, - Func< - IEnumerable>>?, - IDictionary - > dtoBuilder, - JsonSerializerOptions? options, - CancellationToken token - ) - { - int count = 0; - async Task CallAndCreate(IEnumerable>>? rows) - { - var toUpsert = dtoBuilder(rows); - count += toUpsert.Count; - await destination.InsertRawRowsAsync(dbName, tableName, toUpsert, options, token); - } - - string? 
cursor = null; - do - { - try - { - var result = await destination.CogniteClient.Raw.ListRowsAsync< - Dictionary - >( - dbName, - tableName, - new RawRowQuery { Cursor = cursor, Limit = 10_000 }, - null, - token - ); - cursor = result.NextCursor; - - await CallAndCreate(result.Items); - } - catch (ResponseException ex) when (ex.Code == 404) - { - log.LogWarning("Table or database not found: {Message}", ex.Message); - break; - } - } while (cursor != null); - - await CallAndCreate(null); - - log.LogInformation("Updated or created {Count} rows in CDF Raw", count); - } - public async Task>>> GetRawRows( string dbName, string tableName, @@ -1519,90 +977,9 @@ await destination.InsertRawRowsAsync( token ); } - #endregion #region references - /// - /// Create the given list of relationships in CDF, handles duplicates. - /// - /// Relationships to create - private async Task PushReferencesChunk( - IEnumerable relationships, - CancellationToken token - ) - { - if (!relationships.Any()) - return 0; - try - { - await destination.CogniteClient.Relationships.CreateAsync(relationships, token); - return relationships.Count(); - } - catch (ResponseException ex) - { - if (ex.Duplicated.Any()) - { - var existing = new HashSet(); - foreach (var dict in ex.Duplicated) - { - if (dict.TryGetValue("externalId", out var value)) - { - if (value is MultiValue.String strValue) - { - existing.Add(strValue.Value); - } - } - } - if (!existing.Any()) - throw; - - relationships = relationships - .Where(rel => !existing.Contains(rel.ExternalId)) - .ToList(); - return await PushReferencesChunk(relationships, token); - } - else - { - throw; - } - } - } - - /// - /// Create the given list of relationships in CDF Raw, skips rows that already exist. - /// - /// Relationships to create. - private async Task PushRawReferences( - IEnumerable relationships, - BrowseReport report, - CancellationToken token - ) - { - if ( - config.RawMetadata?.Database == null - || config.RawMetadata.RelationshipsTable == null - ) - return; - - await EnsureRawRows( - config.RawMetadata.Database, - config.RawMetadata.RelationshipsTable, - relationships.Select(rel => rel.ExternalId), - ids => - { - var idSet = ids.ToHashSet(); - var creates = relationships - .Where(rel => idSet.Contains(rel.ExternalId)) - .ToDictionary(rel => rel.ExternalId); - report.RelationshipsCreated += creates.Count; - return creates; - }, - new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, - token - ); - } - private async Task MarkReferencesAsDeleted( IEnumerable externalIds, CancellationToken token diff --git a/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs index f39783b4..54047b80 100644 --- a/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs @@ -1,5 +1,6 @@ using System.Collections.Generic; using System.Text.Json; +using System.Threading; using System.Threading.Tasks; using Cognite.OpcUa.Nodes; using Cognite.OpcUa.Pushers.Writers.Dtos; @@ -32,7 +33,8 @@ bool shouldUpdate Task PushReferences( string database, string table, - IEnumerable relationships + IEnumerable relationships, + CancellationToken token ); } } diff --git a/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs b/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs index db5777c3..bebeacc6 100644 --- a/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs @@ -1,14 +1,13 @@ using 
System.Collections.Generic; +using System.Threading; using System.Threading.Tasks; +using Cognite.OpcUa.Pushers.Writers.Dtos; using CogniteSdk; namespace Cognite.OpcUa.Pushers.Writers.Interfaces { public interface IRelationshipsWriter { - Task PushReferences( - IEnumerable relationships, - BrowseReport report - ); + Task PushReferences(IEnumerable relationships, CancellationToken token); } } diff --git a/Extractor/Pushers/Writers/RawWriter.cs b/Extractor/Pushers/Writers/RawWriter.cs index 9344b854..60400567 100644 --- a/Extractor/Pushers/Writers/RawWriter.cs +++ b/Extractor/Pushers/Writers/RawWriter.cs @@ -234,7 +234,8 @@ private async Task EnsureRows(string dbName, string tableName, IEnumerable PushReferences( string database, string table, - IEnumerable relationships + IEnumerable relationships, + CancellationToken token ) { var result = new Result { Created = 0, Updated = 0 }; diff --git a/Extractor/Pushers/Writers/RelationshipsWriter.cs b/Extractor/Pushers/Writers/RelationshipsWriter.cs index 654ddc25..ee6fe953 100644 --- a/Extractor/Pushers/Writers/RelationshipsWriter.cs +++ b/Extractor/Pushers/Writers/RelationshipsWriter.cs @@ -5,6 +5,7 @@ using Cognite.Extractor.Common; using Cognite.Extractor.Utils; using Cognite.OpcUa.Config; +using Cognite.OpcUa.Pushers.Writers.Dtos; using Cognite.OpcUa.Pushers.Writers.Interfaces; using CogniteSdk; using Microsoft.Extensions.Logging; @@ -14,13 +15,11 @@ namespace Cognite.OpcUa.Pushers.Writers public class RelationshipsWriter : IRelationshipsWriter { private readonly ILogger log; - private FullConfig config; + private readonly FullConfig config; private readonly CogniteDestination destination; - private readonly CancellationToken token; public RelationshipsWriter( ILogger logger, - CancellationToken token, CogniteDestination destination, FullConfig config ) @@ -28,24 +27,19 @@ FullConfig config this.log = logger; this.config = config; this.destination = destination; - this.token = token; } - public async Task PushReferences( - IEnumerable relationships, - BrowseReport report - ) + public async Task PushReferences(IEnumerable relationships, CancellationToken token) { + var result = new Result{ Created = 0, Updated = 0 }; var counts = await Task.WhenAll( relationships.ChunkBy(1000).Select(chunk => PushReferencesChunk(chunk, token)) ); - report.RelationshipsCreated += counts.Sum(); + result.Created += counts.Sum(); + return result; } - private async Task PushReferencesChunk( - IEnumerable relationships, - CancellationToken token - ) + private async Task PushReferencesChunk(IEnumerable relationships, CancellationToken token) { if (!relationships.Any()) return 0; diff --git a/Extractor/Pushers/Writers/TimeseriesWriter.cs b/Extractor/Pushers/Writers/TimeseriesWriter.cs index a96b16ac..13c851f5 100644 --- a/Extractor/Pushers/Writers/TimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/TimeseriesWriter.cs @@ -143,13 +143,7 @@ bool createMinimalTimeseries return timeseries.Results; } - private async Task UpdateTimeseries( - UAExtractor extractor, - IDictionary tsMap, - IEnumerable timeseries, - IDictionary nodeToAssetIds, - TypeUpdateConfig update, - Result result) + private async Task UpdateTimeseries(UAExtractor extractor, IDictionary tsMap, IEnumerable timeseries, IDictionary nodeToAssetIds, TypeUpdateConfig update, Result result) { var updates = new List(); var existing = timeseries.ToDictionary(asset => asset.ExternalId); diff --git a/Extractor/Pushers/Writers/WriterUtils.cs b/Extractor/Pushers/Writers/WriterUtils.cs index 95a11823..00053462 100644 --- 
a/Extractor/Pushers/Writers/WriterUtils.cs +++ b/Extractor/Pushers/Writers/WriterUtils.cs @@ -36,7 +36,6 @@ public static void AddWriters(this IServiceCollection services, CancellationToke ), new RelationshipsWriter( provider.GetRequiredService>(), - token, dest, config ) From 5722538483e61827bbdc1fb437c5f82a9c3df1d3 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Wed, 5 Jul 2023 13:27:48 +0200 Subject: [PATCH 08/26] refactor: remove cancellation token from constructors --- Extractor/Pushers/CDFPusher.cs | 44 +++++++------- Extractor/Pushers/Writers/AssetsWriter.cs | 59 +++++-------------- .../Writers/Interfaces/IAssetsWriter.cs | 6 +- .../Pushers/Writers/Interfaces/IRawWriter.cs | 6 +- .../Writers/Interfaces/ITimeseriesWriter.cs | 4 +- Extractor/Pushers/Writers/RawWriter.cs | 34 +++++------ Extractor/Pushers/Writers/TimeseriesWriter.cs | 21 +++---- Extractor/Pushers/Writers/WriterUtils.cs | 3 - 8 files changed, 69 insertions(+), 108 deletions(-) diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index ef4fc7e9..eee305be 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -339,13 +339,7 @@ CancellationToken token { if (fdmDestination != null) { - await fdmDestination.PushNodes( - objects, - variables, - references, - Extractor, - token - ); + await fdmDestination.PushNodes(objects, variables, references, Extractor, token); } return result; } @@ -370,15 +364,10 @@ await fdmDestination.PushNodes( if (pushCleanAssets && assetsMap.Any()) { - await PushCleanAssets(assetsMap, update.Objects, report, result); + await PushCleanAssets(assetsMap, update.Objects, report, result, token); } - isTimeseriesPushed = await PushCleanTimeseries( - timeseriesMap, - update.Variables, - report, - result - ); + isTimeseriesPushed = await PushCleanTimeseries(timeseriesMap, update.Variables, report, result, token); var tasks = new List(); @@ -389,12 +378,12 @@ await fdmDestination.PushNodes( if (!pushCleanAssets && assetsMap.Any()) { - tasks.Add(PushRawAssets(assetsMap, update.Objects, report, result)); + tasks.Add(PushRawAssets(assetsMap, update.Objects, report, result, token)); } if (!pushCleanTimeseries && timeseriesMap.Any()) { - tasks.Add(PushRawTimeseries(timeseriesMap, update.Variables, report, result)); + tasks.Add(PushRawTimeseries(timeseriesMap, update.Variables, report, result, token)); } tasks.Add(PushReferences(references, report, result, token)); @@ -464,12 +453,15 @@ private async Task PushCleanAssets( ConcurrentDictionary assetsMap, TypeUpdateConfig update, BrowseReport report, - PushResult result + PushResult result, + CancellationToken token ) { try { - await cdfWriter.assets.PushNodes(Extractor, assetsMap, nodeToAssetIds, update, report); + var _result = await cdfWriter.assets.PushNodes(Extractor, assetsMap, nodeToAssetIds, update, token); + report.AssetsCreated += _result.Created; + report.AssetsUpdated += _result.Updated; } catch { @@ -482,12 +474,13 @@ private async Task PushCleanTimeseries( ConcurrentDictionary timeseriesMap, TypeUpdateConfig update, BrowseReport report, - PushResult result + PushResult result, + CancellationToken token ) { try { - var _result = await cdfWriter.timeseries.PushVariables(Extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, update); + var _result = await cdfWriter.timeseries.PushVariables(Extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, update, token); var skipMetadata = config.SkipMetadata; var createMinimal = !pushCleanTimeseries || skipMetadata; if (createMinimal) @@ 
-763,7 +756,8 @@ private async Task PushRawAssets( ConcurrentDictionary assetsMap, TypeUpdateConfig update, BrowseReport report, - PushResult result + PushResult result, + CancellationToken token ) { try @@ -774,7 +768,8 @@ PushResult result config.RawMetadata!.AssetsTable!, assetsMap, ConverterType.Node, - update.AnyUpdate + update.AnyUpdate, + token ); report.AssetsCreated += _result.Created; report.AssetsUpdated += _result.Updated; @@ -842,7 +837,7 @@ await MarkRawRowsAsDeleted( /// /// Timeseries to push /// Configuration for which fields, if any, to update in CDF - private async Task PushRawTimeseries(ConcurrentDictionary tsIds, TypeUpdateConfig update, BrowseReport report, PushResult result) + private async Task PushRawTimeseries(ConcurrentDictionary tsIds, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) { try { @@ -856,7 +851,8 @@ private async Task PushRawTimeseries(ConcurrentDictionary ts config.RawMetadata!.TimeseriesTable!, toPushMeta, ConverterType.Variable, - update.AnyUpdate && !config.SkipMetadata + update.AnyUpdate && !config.SkipMetadata, + token ); report.TimeSeriesCreated += _result.Created; report.TimeSeriesUpdated += _result.Updated; diff --git a/Extractor/Pushers/Writers/AssetsWriter.cs b/Extractor/Pushers/Writers/AssetsWriter.cs index 4fc6eb8a..b8f97a3c 100644 --- a/Extractor/Pushers/Writers/AssetsWriter.cs +++ b/Extractor/Pushers/Writers/AssetsWriter.cs @@ -8,6 +8,7 @@ using Cognite.Extractor.Utils; using Cognite.OpcUa.Config; using Cognite.OpcUa.Nodes; +using Cognite.OpcUa.Pushers.Writers.Dtos; using Cognite.OpcUa.Pushers.Writers.Interfaces; using CogniteSdk; using Microsoft.Extensions.Logging; @@ -20,41 +21,27 @@ public class AssetsWriter : IAssetsWriter private readonly ILogger log; private readonly FullConfig config; private readonly CogniteDestination destination; - private readonly CancellationToken token; - public AssetsWriter( - ILogger logger, - CancellationToken token, - CogniteDestination destination, - FullConfig config) + public AssetsWriter(ILogger logger, CogniteDestination destination, FullConfig config) { this.log = logger; this.config = config; this.destination = destination; - this.token = token; } - public async Task PushNodes( - UAExtractor extractor, - ConcurrentDictionary nodes, - IDictionary nodeToAssetIds, - TypeUpdateConfig update, - BrowseReport report - ) + public async Task PushNodes(UAExtractor extractor, ConcurrentDictionary nodes, IDictionary nodeToAssetIds, TypeUpdateConfig update, CancellationToken token) { - var assets = await CreateAssets(extractor, nodes, nodeToAssetIds, report); + var result = new Result { Created = 0, Updated = 0 }; + var assets = await CreateAssets(extractor, nodes, nodeToAssetIds, result, token); if (update.AnyUpdate) { - await UpdateAssets(extractor, nodes, assets, update, report); + await UpdateAssets(extractor, nodes, assets, update, result, token); } + return result; } - private async Task> CreateAssets( - UAExtractor extractor, - IDictionary assetMap, - IDictionary nodeToAssetIds, - BrowseReport report) + private async Task> CreateAssets( UAExtractor extractor, IDictionary assetMap, IDictionary nodeToAssetIds, Result result, CancellationToken token) { var assets = new List(); var maxSize = config.Cognite?.CdfChunking.Assets ?? 
1000; @@ -70,7 +57,7 @@ private async Task> CreateAssets( config.Cognite?.DataSet?.Id, config.Cognite?.MetadataMapping?.Assets)) .Where(asset => asset != null); - report.AssetsCreated += creates.Count(); + result.Created += creates.Count(); return creates; }, RetryMode.None, SanitationMode.Clean, token); @@ -89,13 +76,7 @@ private async Task> CreateAssets( return assets; } - private async Task UpdateAssets( - UAExtractor extractor, - IDictionary assetMap, - IEnumerable assets, - TypeUpdateConfig update, - BrowseReport report - ) + private async Task UpdateAssets(UAExtractor extractor, IDictionary assetMap, IEnumerable assets, TypeUpdateConfig update, Result result, CancellationToken token) { var updates = new List(); var existing = assets.ToDictionary(asset => asset.ExternalId); @@ -103,13 +84,7 @@ BrowseReport report { if (existing.TryGetValue(kvp.Key, out var asset)) { - var assetUpdate = PusherUtils.GetAssetUpdate( - config, - asset, - kvp.Value, - extractor, - update - ); + var assetUpdate = PusherUtils.GetAssetUpdate(config, asset, kvp.Value, extractor, update); if (assetUpdate == null) continue; @@ -126,18 +101,14 @@ BrowseReport report } if (updates.Any()) { - var res = await destination.UpdateAssetsAsync( - updates, - RetryMode.OnError, - SanitationMode.Clean, - token - ); + var res = await destination.UpdateAssetsAsync(updates, RetryMode.OnError, SanitationMode.Clean, token); log.LogResult(res, RequestType.UpdateAssets, false); res.ThrowOnFatal(); - report.AssetsUpdated += res.Results?.Count() ?? 0; + result.Updated += res.Results?.Count() ?? 0; } - } } + } + } } diff --git a/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs b/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs index af831cf6..bfddbce6 100644 --- a/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs @@ -1,20 +1,22 @@ using System.Collections.Concurrent; using System.Collections.Generic; +using System.Threading; using System.Threading.Tasks; using Cognite.OpcUa.Config; using Cognite.OpcUa.Nodes; +using Cognite.OpcUa.Pushers.Writers.Dtos; using Opc.Ua; namespace Cognite.OpcUa.Pushers.Writers.Interfaces { public interface IAssetsWriter { - Task PushNodes( + Task PushNodes( UAExtractor extractor, ConcurrentDictionary assetMap, IDictionary nodeToAssetIds, TypeUpdateConfig config, - BrowseReport report + CancellationToken token ); } } diff --git a/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs index 54047b80..71730d8b 100644 --- a/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs @@ -17,7 +17,8 @@ public interface IRawWriter Task>>> GetRawRows( string dbName, string tableName, - IEnumerable? columns + IEnumerable? 
columns, + CancellationToken token ); Task PushNodes( @@ -26,7 +27,8 @@ Task PushNodes( string table, IDictionary rows, ConverterType converter, - bool shouldUpdate + bool shouldUpdate, + CancellationToken token ) where T : BaseUANode; diff --git a/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs index 177b130a..710a15cf 100644 --- a/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs @@ -1,5 +1,6 @@ using System.Collections.Concurrent; using System.Collections.Generic; +using System.Threading; using System.Threading.Tasks; using Cognite.OpcUa.Config; using Cognite.OpcUa.Nodes; @@ -15,7 +16,8 @@ Task PushVariables( ConcurrentDictionary timeseriesMap, IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, - TypeUpdateConfig update + TypeUpdateConfig update, + CancellationToken token ); } } diff --git a/Extractor/Pushers/Writers/RawWriter.cs b/Extractor/Pushers/Writers/RawWriter.cs index 60400567..ef72ce78 100644 --- a/Extractor/Pushers/Writers/RawWriter.cs +++ b/Extractor/Pushers/Writers/RawWriter.cs @@ -18,21 +18,12 @@ namespace Cognite.OpcUa.Pushers.Writers public class RawWriter : IRawWriter { private readonly ILogger log; - - private CancellationToken token { get; } - private FullConfig config { get; } private CogniteDestination destination { get; } - public RawWriter( - ILogger log, - CancellationToken token, - CogniteDestination destination, - FullConfig config - ) + public RawWriter(ILogger log, CogniteDestination destination, FullConfig config) { this.log = log; - this.token = token; this.config = config; this.destination = destination; } @@ -40,7 +31,8 @@ FullConfig config public async Task>>> GetRawRows( string dbName, string tableName, - IEnumerable? columns + IEnumerable? columns, + CancellationToken token ) { string? 
cursor = null; @@ -75,22 +67,22 @@ public async Task>>> GetRawRo return rows; } - public async Task PushNodes(UAExtractor extractor, string database, string table, IDictionary rows, ConverterType converter, bool shouldUpdate) where T : BaseUANode + public async Task PushNodes(UAExtractor extractor, string database, string table, IDictionary rows, ConverterType converter, bool shouldUpdate, CancellationToken token) where T : BaseUANode { var result = new Result { Created = 0, Updated = 0 }; if (shouldUpdate) { - await Update(extractor, database, table, rows, converter, result); + await Update(extractor, database, table, rows, converter, result, token); } else { - await Create(extractor, database, table, rows, converter, result); + await Create(extractor, database, table, rows, converter, result, token); } return result; } - private async Task Update(UAExtractor extractor, string database, string table, IDictionary dataSet, ConverterType converter, Result result) where T : BaseUANode + private async Task Update(UAExtractor extractor, string database, string table, IDictionary dataSet, ConverterType converter, Result result, CancellationToken token) where T : BaseUANode { await UpsertRows( database, @@ -147,7 +139,7 @@ await UpsertRows( ); } - private async Task Create(UAExtractor extractor, string database, string table, IDictionary dataMap, ConverterType converter, Result result) where T : BaseUANode + private async Task Create(UAExtractor extractor, string database, string table, IDictionary dataMap, ConverterType converter, Result result, CancellationToken token) where T : BaseUANode { await EnsureRows( database, @@ -163,7 +155,8 @@ await EnsureRows( result.Created += creates.Count; return creates; }, - new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase } + new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, + token ); } @@ -216,9 +209,9 @@ async Task CallAndCreate(IEnumerable>>? 
r log.LogInformation("Updated or created {Count} rows in CDF Raw", count); } - private async Task EnsureRows(string dbName, string tableName, IEnumerable keys, Func, IDictionary> dtoBuilder, JsonSerializerOptions options) + private async Task EnsureRows(string dbName, string tableName, IEnumerable keys, Func, IDictionary> dtoBuilder, JsonSerializerOptions options, CancellationToken token) { - var rows = await GetRawRows(dbName, tableName, new[] { "," }); + var rows = await GetRawRows(dbName, tableName, new[] { "," }, token); var existing = rows.Select(row => row.Key); var toCreate = keys.Except(existing); @@ -252,7 +245,8 @@ await EnsureRows( result.Created += creates.Count; return creates; }, - new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase } + new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }, + token ); return result; } diff --git a/Extractor/Pushers/Writers/TimeseriesWriter.cs b/Extractor/Pushers/Writers/TimeseriesWriter.cs index 13c851f5..1ead4f0d 100644 --- a/Extractor/Pushers/Writers/TimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/TimeseriesWriter.cs @@ -22,21 +22,15 @@ public class TimeseriesWriter : ITimeseriesWriter private ILogger log; private readonly FullConfig config; private readonly CogniteDestination destination; - private readonly CancellationToken token; private bool pushCleanTimeseries => string.IsNullOrWhiteSpace(config.Cognite?.RawMetadata?.Database) && string.IsNullOrWhiteSpace(config.Cognite?.RawMetadata?.TimeseriesTable); - public TimeseriesWriter( - ILogger logger, - CancellationToken token, - CogniteDestination destination, - FullConfig config) + public TimeseriesWriter(ILogger logger, CogniteDestination destination, FullConfig config) { this.log = logger; this.config = config; this.destination = destination; - this.token = token; } public async Task PushVariables( @@ -44,7 +38,8 @@ public async Task PushVariables( ConcurrentDictionary timeseriesMap, IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, - TypeUpdateConfig update + TypeUpdateConfig update, + CancellationToken token ) { var result = new Result { Created = 0, Updated = 0 }; @@ -55,7 +50,8 @@ TypeUpdateConfig update nodeToAssetIds, mismatchedTimeseries, result, - !pushCleanTimeseries || (skipMeta.HasValue ? skipMeta.Value : false) + !pushCleanTimeseries || (skipMeta.HasValue ? 
skipMeta.Value : false), + token ); var toPushMeta = timeseriesMap @@ -64,7 +60,7 @@ TypeUpdateConfig update if (update.AnyUpdate && toPushMeta.Any() && pushCleanTimeseries) { - await UpdateTimeseries(extractor, toPushMeta, timeseries, nodeToAssetIds, update, result); + await UpdateTimeseries(extractor, toPushMeta, timeseries, nodeToAssetIds, update, result, token); } return result; } @@ -75,7 +71,8 @@ private async Task> CreateTimeseries( IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, Result result, - bool createMinimalTimeseries + bool createMinimalTimeseries, + CancellationToken token ) { var timeseries = await destination.GetOrCreateTimeSeriesAsync( @@ -143,7 +140,7 @@ bool createMinimalTimeseries return timeseries.Results; } - private async Task UpdateTimeseries(UAExtractor extractor, IDictionary tsMap, IEnumerable timeseries, IDictionary nodeToAssetIds, TypeUpdateConfig update, Result result) + private async Task UpdateTimeseries(UAExtractor extractor, IDictionary tsMap, IEnumerable timeseries, IDictionary nodeToAssetIds, TypeUpdateConfig update, Result result, CancellationToken token) { var updates = new List(); var existing = timeseries.ToDictionary(asset => asset.ExternalId); diff --git a/Extractor/Pushers/Writers/WriterUtils.cs b/Extractor/Pushers/Writers/WriterUtils.cs index 00053462..cf76bdd1 100644 --- a/Extractor/Pushers/Writers/WriterUtils.cs +++ b/Extractor/Pushers/Writers/WriterUtils.cs @@ -18,19 +18,16 @@ public static void AddWriters(this IServiceCollection services, CancellationToke return new CDFWriter( new RawWriter( provider.GetRequiredService>(), - token, dest, config ), new TimeseriesWriter( provider.GetRequiredService>(), - token, dest, config ), new AssetsWriter( provider.GetRequiredService>(), - token, dest, config ), From f32c2796cf2f559655d56f991127be2f4f3117f5 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Wed, 5 Jul 2023 13:54:22 +0200 Subject: [PATCH 09/26] fix: fix config --- Extractor/Pushers/Writers/AssetsWriter.cs | 2 +- config/opc.ua.net.extractor.Config.xml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Extractor/Pushers/Writers/AssetsWriter.cs b/Extractor/Pushers/Writers/AssetsWriter.cs index b8f97a3c..9050e285 100644 --- a/Extractor/Pushers/Writers/AssetsWriter.cs +++ b/Extractor/Pushers/Writers/AssetsWriter.cs @@ -41,7 +41,7 @@ public async Task PushNodes(UAExtractor extractor, ConcurrentDictionary return result; } - private async Task> CreateAssets( UAExtractor extractor, IDictionary assetMap, IDictionary nodeToAssetIds, Result result, CancellationToken token) + private async Task> CreateAssets(UAExtractor extractor, IDictionary assetMap, IDictionary nodeToAssetIds, Result result, CancellationToken token) { var assets = new List(); var maxSize = config.Cognite?.CdfChunking.Assets ?? 
1000; diff --git a/config/opc.ua.net.extractor.Config.xml b/config/opc.ua.net.extractor.Config.xml index 88dbbc30..507711d3 100644 --- a/config/opc.ua.net.extractor.Config.xml +++ b/config/opc.ua.net.extractor.Config.xml @@ -12,8 +12,8 @@ - Directory - ./certificates + X509Store + CurrentUser\My CN=Opcua-extractor, C=NO, S=Oslo, O=Cognite, DC=localhost From f9ce232a79e78b001b818f88025123ac74f90289 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Thu, 6 Jul 2023 12:05:57 +0200 Subject: [PATCH 10/26] feat: integrate new config pattern --- Extractor/BrowseCallback.cs | 5 + Extractor/Config/CogniteConfig.cs | 21 ++ Extractor/NodeSources/NodeSetSource.cs | 2 +- Extractor/NodeSources/UANodeSource.cs | 2 +- Extractor/Nodes/UADataType.cs | 2 +- Extractor/Pushers/CDFPusher.cs | 261 ++++++++---------- Extractor/Pushers/FDM/DMSValueConverter.cs | 3 - Extractor/Pushers/FDM/FDMWriter.cs | 8 +- Extractor/Pushers/FDM/NodeTypeCollector.cs | 5 +- Extractor/Pushers/FDM/TypeHierarchyBuilder.cs | 13 +- Extractor/Pushers/IPusher.cs | 3 + Extractor/Pushers/Writers/AssetsWriter.cs | 3 +- Extractor/Pushers/Writers/CDFWriter.cs | 21 +- .../Writers/Interfaces/IAssetsWriter.cs | 3 +- .../Pushers/Writers/Interfaces/ICDFWriter.cs | 9 +- .../Writers/Interfaces/ITimeseriesWriter.cs | 3 +- .../Writers/MinimalTimeseriesWriter.cs | 18 ++ Extractor/Pushers/Writers/TimeseriesWriter.cs | 16 +- Extractor/Pushers/Writers/WriterUtils.cs | 11 +- ExtractorLauncher/ExtractorStarter.cs | 39 +++ Test/CDFMockHandler.cs | 6 +- Test/Integration/NodeExtractionTests.cs | 45 ++- Test/Unit/CDFPusherTest.cs | 219 +++++++++++---- Test/Unit/DeleteTest.cs | 31 ++- Test/Unit/FDMTests.cs | 29 +- Test/Unit/MQTTBridgeTests.cs | 14 +- Test/Unit/MQTTPusherTest.cs | 22 +- 27 files changed, 517 insertions(+), 297 deletions(-) create mode 100644 Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs diff --git a/Extractor/BrowseCallback.cs b/Extractor/BrowseCallback.cs index 68433e69..d25432fe 100644 --- a/Extractor/BrowseCallback.cs +++ b/Extractor/BrowseCallback.cs @@ -27,10 +27,15 @@ public class BrowseReport { public string? IdPrefix { get; set; } public int AssetsUpdated { get; set; } + public int RawAssetsUpdated { get; set; } public int AssetsCreated { get; set; } + public int RawAssetsCreated { get; set; } public int TimeSeriesUpdated { get; set; } + public int RawTimeseriesUpdated { get; set; } public int TimeSeriesCreated { get; set; } + public int RawTimeseriesCreated { get; set; } public int RelationshipsCreated { get; set; } + public int RawRelationshipsCreated { get; set; } public int MinimalTimeSeriesCreated { get; set; } public string? RawDatabase { get; set; } public string? AssetsTable { get; set; } diff --git a/Extractor/Config/CogniteConfig.cs b/Extractor/Config/CogniteConfig.cs index 969cb981..26ab3b81 100644 --- a/Extractor/Config/CogniteConfig.cs +++ b/Extractor/Config/CogniteConfig.cs @@ -115,6 +115,8 @@ public double? NonFiniteReplacement /// Configuration for writing to a custom OPC-UA flexible data model. /// public FdmDestinationConfig? FlexibleDataModels { get; set; } + + public MetadataTargetsConfig? MetadataTargets { get; set; } } public class RawMetadataConfig { @@ -136,6 +138,25 @@ public class RawMetadataConfig /// public string? RelationshipsTable { get; set; } } + public class MetadataTargetsConfig + { + public RawMetadataTargetConfig? RawMetadata { get; set; } + public CleanMetadataTargetConfig? CleanMetadata { get; set; } + public FdmDestinationConfig? 
FlexibleDataModels { get; set; } + } + public class RawMetadataTargetConfig + { + public string? Database { get; set; } + public string? AssetsTable { get; set; } + public string? TimeseriesTable { get; set; } + public string? RelationshipsTable { get; set; } + } + public class CleanMetadataTargetConfig + { + public bool Assets { get; set; } = true; + public bool Timeseries { get; set; } = true; + public bool Relationships { get; set; } = true; + } public class MetadataMapConfig { public Dictionary? Assets { get; set; } diff --git a/Extractor/NodeSources/NodeSetSource.cs b/Extractor/NodeSources/NodeSetSource.cs index 53baff5f..4922c320 100644 --- a/Extractor/NodeSources/NodeSetSource.cs +++ b/Extractor/NodeSources/NodeSetSource.cs @@ -349,7 +349,7 @@ private async Task InitNodes(IEnumerable nodes, CancellationToken to await InitNodes(NodeList, token); - var usesFdm = Config.Cognite?.FlexibleDataModels?.Enabled ?? false; + var usesFdm = Config.Cognite?.MetadataTargets?.FlexibleDataModels?.Enabled ?? false; if (Config.Extraction.Relationships.Enabled) { diff --git a/Extractor/NodeSources/UANodeSource.cs b/Extractor/NodeSources/UANodeSource.cs index 1d10d28c..3c12a746 100644 --- a/Extractor/NodeSources/UANodeSource.cs +++ b/Extractor/NodeSources/UANodeSource.cs @@ -110,7 +110,7 @@ private async Task InitNodes(IEnumerable nodes, CancellationToken to await InitNodes(NodeList, token); - var usesFdm = Config.Cognite?.FlexibleDataModels?.Enabled ?? false; + var usesFdm = Config.Cognite?.MetadataTargets?.FlexibleDataModels?.Enabled ?? false; if (Config.Extraction.Relationships.Enabled) { diff --git a/Extractor/Nodes/UADataType.cs b/Extractor/Nodes/UADataType.cs index 727e39b8..97cb27d5 100644 --- a/Extractor/Nodes/UADataType.cs +++ b/Extractor/Nodes/UADataType.cs @@ -38,7 +38,7 @@ public DataTypeAttributes() : base(NodeClass.DataType) public override IEnumerable GetAttributeSet(FullConfig config) { - if (config.Cognite?.FlexibleDataModels?.Enabled ?? false) + if (config.Cognite?.MetadataTargets?.FlexibleDataModels?.Enabled ?? false) { yield return Attributes.DataTypeDefinition; } diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index eee305be..7029cea3 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -74,15 +74,8 @@ public sealed class CDFPusher : IPusher private readonly BrowseCallback? callback; private readonly FDMWriter? fdmDestination; - private bool pushCleanAssets => - string.IsNullOrWhiteSpace(config.RawMetadata?.Database) - && string.IsNullOrWhiteSpace(config.RawMetadata?.AssetsTable); - private bool pushCleanTimeseries => - string.IsNullOrWhiteSpace(config.RawMetadata?.Database) - && string.IsNullOrWhiteSpace(config.RawMetadata?.TimeseriesTable); - private bool pushCleanReferences => - string.IsNullOrWhiteSpace(config.RawMetadata?.Database) - && string.IsNullOrWhiteSpace(config.RawMetadata?.RelationshipsTable); + private RawMetadataTargetConfig? RawMetadataTargetConfig => fullConfig.Cognite?.MetadataTargets?.RawMetadata; + private CleanMetadataTargetConfig? CleanMetadataTargetConfig => fullConfig.Cognite?.MetadataTargets?.CleanMetadata; public CDFPusher( @@ -103,7 +96,7 @@ public CDFPusher( { callback = new BrowseCallback(destination, config.BrowseCallback, log); } - if (config.FlexibleDataModels != null && config.FlexibleDataModels.Enabled) + if (config.MetadataTargets?.FlexibleDataModels != null && (config.MetadataTargets?.FlexibleDataModels.Enabled ?? 
false)) { fdmDestination = new FDMWriter(provider.GetRequiredService(), destination, provider.GetRequiredService>()); @@ -209,8 +202,6 @@ public CDFPusher( } } - - result.ThrowOnFatal(); log.LogDebug("Successfully pushed {Real} / {Total} points to CDF", realCount, count); @@ -302,22 +293,17 @@ public CDFPusher( /// List of variables to be synchronized /// Configuration of what fields, if any, should be updated. /// True if no operation failed unexpectedly - public async Task PushNodes( - IEnumerable objects, - IEnumerable variables, - IEnumerable references, - UpdateConfig update, - CancellationToken token - ) + public async Task PushNodes(IEnumerable objects, + IEnumerable variables, IEnumerable references, UpdateConfig update, CancellationToken token) { var result = new PushResult(); var report = new BrowseReport { IdPrefix = fullConfig.Extraction.IdPrefix, - RawDatabase = config.RawMetadata?.Database, - AssetsTable = config.RawMetadata?.AssetsTable, - TimeSeriesTable = config.RawMetadata?.TimeseriesTable, - RelationshipsTable = config.RawMetadata?.RelationshipsTable + RawDatabase = RawMetadataTargetConfig?.Database, + AssetsTable = RawMetadataTargetConfig?.AssetsTable, + TimeSeriesTable = RawMetadataTargetConfig?.TimeseriesTable, + RelationshipsTable = RawMetadataTargetConfig?.RelationshipsTable }; if (!variables.Any() && !objects.Any() && !references.Any()) @@ -358,41 +344,21 @@ CancellationToken token return result; } - var assetsMap = MapAssets(objects); - var timeseriesMap = MapTimeseries(variables); - bool isTimeseriesPushed = true; - - if (pushCleanAssets && assetsMap.Any()) - { - await PushCleanAssets(assetsMap, update.Objects, report, result, token); - } - - isTimeseriesPushed = await PushCleanTimeseries(timeseriesMap, update.Variables, report, result, token); - var tasks = new List(); - if (isTimeseriesPushed && fdmDestination != null) + tasks.Add(PushAssets(objects, update.Objects, report, result, token)); + tasks.Add(PushTimeseries(variables, update.Variables, report, result, token)); + tasks.Add(PushReferences(references, report, result, token)); + if (fdmDestination != null) { tasks.Add(PushFdm(objects, variables, references, result, token)); } - if (!pushCleanAssets && assetsMap.Any()) - { - tasks.Add(PushRawAssets(assetsMap, update.Objects, report, result, token)); - } - - if (!pushCleanTimeseries && timeseriesMap.Any()) - { - tasks.Add(PushRawTimeseries(timeseriesMap, update.Variables, report, result, token)); - } - - tasks.Add(PushReferences(references, report, result, token)); - await Task.WhenAll(tasks); log.LogInformation("Finish pushing nodes to CDF"); - if (result.Objects && result.References && result.Variables) + if (result.Objects && result.References && result.Variables && result.RawObjects && result.RawVariables && result.RawReferences) { if (callback != null) { @@ -407,13 +373,7 @@ CancellationToken token return result; } - private async Task PushFdm( - IEnumerable objects, - IEnumerable variables, - IEnumerable references, - PushResult result, - CancellationToken token - ) + private async Task PushFdm(IEnumerable objects, IEnumerable variables, IEnumerable references, PushResult result, CancellationToken token) { bool pushResult = true; try @@ -440,26 +400,64 @@ private ConcurrentDictionary MapAssets(IEnumerable MapTimeseries( - IEnumerable variables - ) + private ConcurrentDictionary MapTimeseries(IEnumerable variables) { return new ConcurrentDictionary( variables.ToDictionary(ts => ts.GetUniqueId(Extractor)!) 
); } - private async Task PushCleanAssets( - ConcurrentDictionary assetsMap, - TypeUpdateConfig update, - BrowseReport report, - PushResult result, - CancellationToken token - ) + private async Task PushAssets(IEnumerable objects, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) + { + if (!objects.Any()) return; + + var assetsMap = MapAssets(objects); + if (CleanMetadataTargetConfig?.Assets ?? false) + { + await PushCleanAssets(assetsMap, update, report, result, token); + } + if (RawMetadataTargetConfig?.Database != null && RawMetadataTargetConfig?.AssetsTable != null) + { + await PushRawAssets(assetsMap, update, report, result, token); + } + } + + private async Task PushTimeseries(IEnumerable variables, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) + { + if (!variables.Any()) return; + + var timeseriesMap = MapTimeseries(variables); + await PushCleanTimeseries(timeseriesMap, update, report, result, token); + if ((RawMetadataTargetConfig?.Database != null) && (RawMetadataTargetConfig?.TimeseriesTable != null)) + { + await PushRawTimeseries(timeseriesMap, update, report, result, token); + } + } + + private async Task PushReferences(IEnumerable references, BrowseReport report, PushResult result, CancellationToken token) + { + if (!references.Any()) return; + + var relationships = references + .Select(reference => reference.ToRelationship(config.DataSet?.Id, Extractor)) + .DistinctBy(rel => rel.ExternalId); + + if (CleanMetadataTargetConfig?.Relationships ?? false) + { + await PushCleanReferences(relationships, report, result, token); + } + + if (RawMetadataTargetConfig?.Database != null && RawMetadataTargetConfig?.RelationshipsTable != null) + { + await PushRawReferences(relationships, report, result, token); + } + } + + private async Task PushCleanAssets(IDictionary assetsMap, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) { try { - var _result = await cdfWriter.assets.PushNodes(Extractor, assetsMap, nodeToAssetIds, update, token); + var _result = await cdfWriter.Assets.PushNodes(Extractor, assetsMap, nodeToAssetIds, update, token); report.AssetsCreated += _result.Created; report.AssetsUpdated += _result.Updated; } @@ -467,22 +465,15 @@ CancellationToken token { result.Objects = false; } - return result.Objects; } - private async Task PushCleanTimeseries( - ConcurrentDictionary timeseriesMap, - TypeUpdateConfig update, - BrowseReport report, - PushResult result, - CancellationToken token - ) + private async Task PushCleanTimeseries(IDictionary timeseriesMap, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) { try { - var _result = await cdfWriter.timeseries.PushVariables(Extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, update, token); - var skipMetadata = config.SkipMetadata; - var createMinimal = !pushCleanTimeseries || skipMetadata; + var createMinimal = !(CleanMetadataTargetConfig?.Timeseries ?? false); + var writer = createMinimal ? 
cdfWriter.MinimalTimeseries : cdfWriter.Timeseries; + var _result = await writer.PushVariables(Extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, update, token); if (createMinimal) { report.MinimalTimeSeriesCreated += _result.Created; @@ -497,8 +488,33 @@ CancellationToken token { result.Variables = false; } + } + + private async Task PushCleanReferences(IEnumerable relationships, BrowseReport report, PushResult result, CancellationToken token) + { + try + { + var _result = await cdfWriter.Relationships.PushReferences(relationships, token); + report.RelationshipsCreated += _result.Created; + } + catch (Exception e) + { + log.LogError(e, "Failed to ensure relationships"); + result.References = false; + } + } - return result.Variables; + private async Task PushRawReferences(IEnumerable relationships, BrowseReport report, PushResult result, CancellationToken token) + { + try + { + var _result = await cdfWriter.Raw.PushReferences(RawMetadataTargetConfig!.Database!, RawMetadataTargetConfig!.RelationshipsTable!, relationships, token); + report.RawRelationshipsCreated += _result.Created; + } catch (Exception e) + { + log.LogError(e, "Failed to ensure raw relationships"); + result.RawReferences = false; + } } /// @@ -681,39 +697,6 @@ await destination.CogniteClient.Events.ListAsync( return true; } - /// - /// Push list of references as relationships to CDF. - /// - /// List of references to push - /// True if nothing failed unexpectedly - private async Task PushReferences( - IEnumerable references, - BrowseReport report, - PushResult result, - CancellationToken token - ) - { - try - { - if (references == null || !references.Any()) - return; - - var relationships = references - .Select(reference => reference.ToRelationship(config.DataSet?.Id, Extractor)) - .DistinctBy(rel => rel.ExternalId); - - var _result = pushCleanReferences ? 
- await cdfWriter.relationships.PushReferences(relationships, token) : - await cdfWriter.raw.PushReferences(config.RawMetadata!.Database!, config.RawMetadata!.RelationshipsTable!, relationships, token); - report.RelationshipsCreated += _result.Created; - } - catch (Exception e) - { - log.LogError(e, "Failed to ensure references"); - result.References = false; - } - } - public async Task ExecuteDeletes(DeletedNodes deletes, CancellationToken token) { if (fullConfig.DryRun) @@ -762,22 +745,22 @@ CancellationToken token { try { - var _result = await cdfWriter.raw.PushNodes( + var _result = await cdfWriter.Raw.PushNodes( Extractor, - config.RawMetadata!.Database!, - config.RawMetadata!.AssetsTable!, + RawMetadataTargetConfig!.Database!, + RawMetadataTargetConfig!.AssetsTable!, assetsMap, ConverterType.Node, update.AnyUpdate, token ); - report.AssetsCreated += _result.Created; - report.AssetsUpdated += _result.Updated; + report.RawAssetsCreated += _result.Created; + report.RawAssetsUpdated += _result.Updated; } catch (Exception e) { log.LogError(e, "Failed to ensure assets"); - result.Objects = false; + result.RawObjects = false; } } @@ -787,15 +770,15 @@ CancellationToken token ) { bool useRawAssets = - config.RawMetadata != null - && !string.IsNullOrWhiteSpace(config.RawMetadata.Database) - && !string.IsNullOrWhiteSpace(config.RawMetadata.AssetsTable); + RawMetadataTargetConfig != null + && !string.IsNullOrWhiteSpace(RawMetadataTargetConfig.Database) + && !string.IsNullOrWhiteSpace(RawMetadataTargetConfig.AssetsTable); if (useRawAssets) { await MarkRawRowsAsDeleted( - config.RawMetadata!.Database!, - config.RawMetadata!.AssetsTable!, + RawMetadataTargetConfig!.Database!, + RawMetadataTargetConfig!.AssetsTable!, externalIds, token ); @@ -845,22 +828,22 @@ private async Task PushRawTimeseries(ConcurrentDictionary ts .Where(kvp => kvp.Value.Source != NodeSource.CDF) .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); - var _result = await cdfWriter.raw.PushNodes( + var _result = await cdfWriter.Raw.PushNodes( Extractor, - config.RawMetadata!.Database!, - config.RawMetadata!.TimeseriesTable!, + RawMetadataTargetConfig!.Database!, + RawMetadataTargetConfig!.TimeseriesTable!, toPushMeta, ConverterType.Variable, - update.AnyUpdate && !config.SkipMetadata, + update.AnyUpdate, token ); - report.TimeSeriesCreated += _result.Created; - report.TimeSeriesUpdated += _result.Updated; + report.RawTimeseriesCreated += _result.Created; + report.RawTimeseriesUpdated += _result.Updated; } catch (Exception e) { log.LogError(e, "Failed to ensure timeseries"); - result.Variables = false; + result.RawVariables = false; } } @@ -870,15 +853,15 @@ CancellationToken token ) { bool useRawTss = - config.RawMetadata != null - && !string.IsNullOrWhiteSpace(config.RawMetadata.Database) - && !string.IsNullOrWhiteSpace(config.RawMetadata.TimeseriesTable); + RawMetadataTargetConfig != null + && !string.IsNullOrWhiteSpace(RawMetadataTargetConfig.Database) + && !string.IsNullOrWhiteSpace(RawMetadataTargetConfig.TimeseriesTable); if (useRawTss) { await MarkRawRowsAsDeleted( - config.RawMetadata!.Database!, - config.RawMetadata!.TimeseriesTable!, + RawMetadataTargetConfig!.Database!, + RawMetadataTargetConfig!.TimeseriesTable!, externalIds, token ); @@ -982,15 +965,15 @@ CancellationToken token ) { bool useRawRelationships = - config.RawMetadata != null - && !string.IsNullOrWhiteSpace(config.RawMetadata.Database) - && !string.IsNullOrWhiteSpace(config.RawMetadata.RelationshipsTable); + RawMetadataTargetConfig != null + && 
!string.IsNullOrWhiteSpace(RawMetadataTargetConfig.Database) + && !string.IsNullOrWhiteSpace(RawMetadataTargetConfig.RelationshipsTable); if (useRawRelationships) { await MarkRawRowsAsDeleted( - config.RawMetadata!.Database!, - config.RawMetadata!.RelationshipsTable!, + RawMetadataTargetConfig!.Database!, + RawMetadataTargetConfig!.RelationshipsTable!, externalIds, token ); diff --git a/Extractor/Pushers/FDM/DMSValueConverter.cs b/Extractor/Pushers/FDM/DMSValueConverter.cs index f0b237e2..e56ed44f 100644 --- a/Extractor/Pushers/FDM/DMSValueConverter.cs +++ b/Extractor/Pushers/FDM/DMSValueConverter.cs @@ -3,11 +3,8 @@ using Opc.Ua; using System; using System.Collections; -using System.Collections.Generic; using System.Linq; -using System.Text; using System.Text.Json; -using System.Threading.Tasks; namespace Cognite.OpcUa.Pushers.FDM { diff --git a/Extractor/Pushers/FDM/FDMWriter.cs b/Extractor/Pushers/FDM/FDMWriter.cs index c33fda15..06dedf87 100644 --- a/Extractor/Pushers/FDM/FDMWriter.cs +++ b/Extractor/Pushers/FDM/FDMWriter.cs @@ -46,7 +46,7 @@ public FDMWriter(FullConfig config, CogniteDestination destination, ILogger instances, int chunkSize, CancellationToken token) @@ -91,7 +91,7 @@ private async Task Initialize(FDMTypeBatch types, CancellationToken token) var options = new JsonSerializerOptions(Oryx.Cognite.Common.jsonOptions) { WriteIndented = true }; var viewsToInsert = types.Views.Values.ToList(); - if (config.Cognite!.FlexibleDataModels!.SkipSimpleTypes) + if (config.Cognite!.MetadataTargets!.FlexibleDataModels!.SkipSimpleTypes) { viewsToInsert = viewsToInsert.Where(v => v.Properties.Any() || types.ViewIsReferenced.GetValueOrDefault(v.ExternalId)).ToList(); } @@ -108,7 +108,7 @@ private async Task Initialize(FDMTypeBatch types, CancellationToken token) if (config.DryRun) return; // Check if the data model exists - if (config.Cognite!.FlexibleDataModels!.SkipTypesOnEqualCount) + if (config.Cognite!.MetadataTargets!.FlexibleDataModels!.SkipTypesOnEqualCount) { try { @@ -237,7 +237,7 @@ public async Task PushNodes( log.LogInformation("Mapped out {Nodes} nodes and {Edges} edges to write to PG3", nodes.Count, finalReferences.Count); // Run the node filter unless we are writing everything. 
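The pattern this patch repeats across NodeSetSource, UADataType, FDMWriter and the type hierarchy builder is that every FDM and raw-metadata setting is now reached through cognite.metadata-targets rather than the old top-level properties. Below is a minimal sketch of that selection logic, assuming the MetadataTargetsConfig, RawMetadataTargetConfig and CleanMetadataTargetConfig classes added in CogniteConfig.cs above; the ActiveTargets helper itself is illustrative and not part of the patch.

using Cognite.OpcUa.Config;

// Illustrative helper only: mirrors how the pusher decides where a node batch goes
// under cognite.metadata-targets. Raw targets require both a database and a table
// name, clean targets are plain booleans, and FDM keeps its own Enabled flag.
public static class ActiveTargets
{
    public static bool CleanAssets(CognitePusherConfig? cfg) =>
        cfg?.MetadataTargets?.CleanMetadata?.Assets ?? false;

    public static bool RawAssets(CognitePusherConfig? cfg) =>
        cfg?.MetadataTargets?.RawMetadata?.Database != null
        && cfg?.MetadataTargets?.RawMetadata?.AssetsTable != null;

    public static bool Fdm(CognitePusherConfig? cfg) =>
        cfg?.MetadataTargets?.FlexibleDataModels?.Enabled ?? false;
}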
- if (config.Cognite!.FlexibleDataModels!.ExcludeNonReferenced) + if (config.Cognite!.MetadataTargets!.FlexibleDataModels!.ExcludeNonReferenced) { var trimmer = new NodeTrimmer(nodeHierarchy, config, log); nodeHierarchy = trimmer.Filter(); diff --git a/Extractor/Pushers/FDM/NodeTypeCollector.cs b/Extractor/Pushers/FDM/NodeTypeCollector.cs index 2937bb5a..338ced6a 100644 --- a/Extractor/Pushers/FDM/NodeTypeCollector.cs +++ b/Extractor/Pushers/FDM/NodeTypeCollector.cs @@ -1,6 +1,5 @@ using System.Collections.Generic; using System.Linq; -using Cognite.OpcUa.Config; using Cognite.OpcUa.Nodes; using Cognite.OpcUa.Pushers.FDM.Types; using Cognite.OpcUa.Types; @@ -14,12 +13,10 @@ public class NodeTypeCollector private readonly ILogger log; public Dictionary Types { get; } private readonly Dictionary properties; - private readonly FullConfig config; private readonly HashSet visitedIds = new(); - public NodeTypeCollector(ILogger log, FullConfig config) + public NodeTypeCollector(ILogger log) { this.log = log; - this.config = config; Types = new(); properties = new(); } diff --git a/Extractor/Pushers/FDM/TypeHierarchyBuilder.cs b/Extractor/Pushers/FDM/TypeHierarchyBuilder.cs index 5a1813e7..e27ff39e 100644 --- a/Extractor/Pushers/FDM/TypeHierarchyBuilder.cs +++ b/Extractor/Pushers/FDM/TypeHierarchyBuilder.cs @@ -1,14 +1,7 @@ -using System; using System.Collections.Generic; using System.Linq; -using System.Net; -using System.Text.RegularExpressions; -using System.Threading; -using System.Threading.Tasks; using Cognite.OpcUa.Config; using Cognite.OpcUa.Pushers.FDM.Types; -using Cognite.OpcUa.TypeCollectors; -using Cognite.OpcUa.Types; using CogniteSdk.Beta.DataModels; using Microsoft.Extensions.Logging; using Opc.Ua; @@ -230,9 +223,9 @@ public TypeHierarchyBuilder(ILogger log, DMSValueConverter converter, FullConfig { this.log = log; this.config = config; - nodeTypes = new NodeTypeCollector(log, config); - space = config.Cognite!.FlexibleDataModels!.Space!; - fdmConfig = config.Cognite.FlexibleDataModels!; + nodeTypes = new NodeTypeCollector(log); + space = config.Cognite!.MetadataTargets!.FlexibleDataModels!.Space!; + fdmConfig = config.Cognite!.MetadataTargets!.FlexibleDataModels!; this.converter = converter; } diff --git a/Extractor/Pushers/IPusher.cs b/Extractor/Pushers/IPusher.cs index a9e5c9c6..6c4250f0 100644 --- a/Extractor/Pushers/IPusher.cs +++ b/Extractor/Pushers/IPusher.cs @@ -30,8 +30,11 @@ namespace Cognite.OpcUa public class PushResult { public bool Objects { get; set; } = true; + public bool RawObjects { get; set; } = true; public bool Variables { get; set; } = true; + public bool RawVariables { get; set; } = true; public bool References { get; set; } = true; + public bool RawReferences { get; set; } = true; } public interface IPusher : IDisposable diff --git a/Extractor/Pushers/Writers/AssetsWriter.cs b/Extractor/Pushers/Writers/AssetsWriter.cs index 9050e285..b742f593 100644 --- a/Extractor/Pushers/Writers/AssetsWriter.cs +++ b/Extractor/Pushers/Writers/AssetsWriter.cs @@ -1,4 +1,3 @@ -using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Threading; @@ -29,7 +28,7 @@ public AssetsWriter(ILogger logger, CogniteDestination destination this.destination = destination; } - public async Task PushNodes(UAExtractor extractor, ConcurrentDictionary nodes, IDictionary nodeToAssetIds, TypeUpdateConfig update, CancellationToken token) + public async Task PushNodes(UAExtractor extractor, IDictionary nodes, IDictionary nodeToAssetIds, 
TypeUpdateConfig update, CancellationToken token) { var result = new Result { Created = 0, Updated = 0 }; var assets = await CreateAssets(extractor, nodes, nodeToAssetIds, result, token); diff --git a/Extractor/Pushers/Writers/CDFWriter.cs b/Extractor/Pushers/Writers/CDFWriter.cs index 19bf8f13..2a35d38e 100644 --- a/Extractor/Pushers/Writers/CDFWriter.cs +++ b/Extractor/Pushers/Writers/CDFWriter.cs @@ -4,22 +4,25 @@ namespace Cognite.OpcUa.Pushers.Writers { public class CDFWriter : ICDFWriter { - public IRawWriter raw { get; } - public ITimeseriesWriter timeseries { get; } - public IAssetsWriter assets { get; } - public IRelationshipsWriter relationships{ get; } + public IRawWriter Raw { get; } + public ITimeseriesWriter Timeseries { get; } + public IAssetsWriter Assets { get; } + public IRelationshipsWriter Relationships{ get; } + public ITimeseriesWriter MinimalTimeseries { get; } public CDFWriter( IRawWriter rawWriter, ITimeseriesWriter timeseriesWriter, IAssetsWriter assetsWriter, - IRelationshipsWriter relationshipsWriter + IRelationshipsWriter relationshipsWriter, + ITimeseriesWriter minimalTimeSeriesWriter ) { - this.raw = rawWriter; - this.timeseries = timeseriesWriter; - this.assets = assetsWriter; - this.relationships = relationshipsWriter; + this.Raw = rawWriter; + this.Timeseries = timeseriesWriter; + this.Assets = assetsWriter; + this.Relationships = relationshipsWriter; + this.MinimalTimeseries = minimalTimeSeriesWriter; } } } diff --git a/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs b/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs index bfddbce6..009b52c3 100644 --- a/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/IAssetsWriter.cs @@ -1,4 +1,3 @@ -using System.Collections.Concurrent; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; @@ -13,7 +12,7 @@ public interface IAssetsWriter { Task PushNodes( UAExtractor extractor, - ConcurrentDictionary assetMap, + IDictionary assetMap, IDictionary nodeToAssetIds, TypeUpdateConfig config, CancellationToken token diff --git a/Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs b/Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs index 71246c7d..62e1e189 100644 --- a/Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs @@ -2,9 +2,10 @@ namespace Cognite.OpcUa.Pushers.Writers.Interfaces { public interface ICDFWriter { - IRawWriter raw { get; } - ITimeseriesWriter timeseries { get; } - IAssetsWriter assets { get; } - IRelationshipsWriter relationships { get; } + IRawWriter Raw { get; } + ITimeseriesWriter Timeseries { get; } + ITimeseriesWriter MinimalTimeseries { get; } + IAssetsWriter Assets { get; } + IRelationshipsWriter Relationships { get; } } } diff --git a/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs index 710a15cf..ecb71c29 100644 --- a/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs @@ -1,4 +1,3 @@ -using System.Collections.Concurrent; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; @@ -13,7 +12,7 @@ public interface ITimeseriesWriter { Task PushVariables( UAExtractor extractor, - ConcurrentDictionary timeseriesMap, + IDictionary timeseriesMap, IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, TypeUpdateConfig update, diff --git 
a/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs b/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs new file mode 100644 index 00000000..bc46efbe --- /dev/null +++ b/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs @@ -0,0 +1,18 @@ +using Cognite.Extractor.Utils; +using Cognite.OpcUa.Config; +using Cognite.OpcUa.Pushers.Writers.Interfaces; +using Microsoft.Extensions.Logging; + +namespace Cognite.OpcUa.Pushers.Writers +{ + public class MinimalTimeseriesWriter : TimeseriesWriter, ITimeseriesWriter + { + public MinimalTimeseriesWriter( + ILogger logger, + CogniteDestination destination, + FullConfig config + ) + : base(logger, destination, config) { } + protected override bool createMinimalTimeseries => true; + } +} diff --git a/Extractor/Pushers/Writers/TimeseriesWriter.cs b/Extractor/Pushers/Writers/TimeseriesWriter.cs index 1ead4f0d..2444e801 100644 --- a/Extractor/Pushers/Writers/TimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/TimeseriesWriter.cs @@ -1,5 +1,4 @@ using System; -using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Threading; @@ -22,9 +21,7 @@ public class TimeseriesWriter : ITimeseriesWriter private ILogger log; private readonly FullConfig config; private readonly CogniteDestination destination; - private bool pushCleanTimeseries => - string.IsNullOrWhiteSpace(config.Cognite?.RawMetadata?.Database) - && string.IsNullOrWhiteSpace(config.Cognite?.RawMetadata?.TimeseriesTable); + protected virtual bool createMinimalTimeseries => !(config.Cognite?.MetadataTargets?.CleanMetadata?.Timeseries ?? false); public TimeseriesWriter(ILogger logger, CogniteDestination destination, FullConfig config) { @@ -33,9 +30,9 @@ public TimeseriesWriter(ILogger logger, CogniteDestination des this.destination = destination; } - public async Task PushVariables( + public virtual async Task PushVariables( UAExtractor extractor, - ConcurrentDictionary timeseriesMap, + IDictionary timeseriesMap, IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, TypeUpdateConfig update, @@ -43,14 +40,13 @@ CancellationToken token ) { var result = new Result { Created = 0, Updated = 0 }; - var skipMeta = config.Cognite?.SkipMetadata; var timeseries = await CreateTimeseries( extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, result, - !pushCleanTimeseries || (skipMeta.HasValue ? 
skipMeta.Value : false), + createMinimalTimeseries, token ); @@ -58,13 +54,13 @@ CancellationToken token .Where(kvp => kvp.Value.Source != NodeSource.CDF) .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); - if (update.AnyUpdate && toPushMeta.Any() && pushCleanTimeseries) + if (update.AnyUpdate && toPushMeta.Any()) { await UpdateTimeseries(extractor, toPushMeta, timeseries, nodeToAssetIds, update, result, token); } return result; } - + private async Task> CreateTimeseries( UAExtractor extractor, IDictionary tsMap, diff --git a/Extractor/Pushers/Writers/WriterUtils.cs b/Extractor/Pushers/Writers/WriterUtils.cs index cf76bdd1..de80085f 100644 --- a/Extractor/Pushers/Writers/WriterUtils.cs +++ b/Extractor/Pushers/Writers/WriterUtils.cs @@ -16,11 +16,7 @@ public static void AddWriters(this IServiceCollection services, CancellationToke var dest = provider.GetRequiredService(); var config = provider.GetRequiredService(); return new CDFWriter( - new RawWriter( - provider.GetRequiredService>(), - dest, - config - ), + new RawWriter(provider.GetRequiredService>(), dest, config), new TimeseriesWriter( provider.GetRequiredService>(), dest, @@ -35,6 +31,11 @@ public static void AddWriters(this IServiceCollection services, CancellationToke provider.GetRequiredService>(), dest, config + ), + new MinimalTimeseriesWriter( + provider.GetRequiredService>(), + dest, + config ) ); }); diff --git a/ExtractorLauncher/ExtractorStarter.cs b/ExtractorLauncher/ExtractorStarter.cs index 6518869d..8ab17d7c 100644 --- a/ExtractorLauncher/ExtractorStarter.cs +++ b/ExtractorLauncher/ExtractorStarter.cs @@ -125,6 +125,45 @@ public static class ExtractorStarter { return "subscriptions.keep-alive-count must be greater than 0"; } + if (config.Cognite?.RawMetadata != null) + { + log.LogWarning("cognite.raw-metadata is deprecated. Use cognite.metadata-targets instead"); + if (config.Cognite.MetadataTargets != null) + { + return "cognite.raw-metadata and cognite.metadata-targets cannot be set at the same time."; + } + if (config.Cognite == null) config.Cognite = new CognitePusherConfig(); + var rawMetadata = config.Cognite.RawMetadata; + var useCleanAssets = (rawMetadata?.Database == null || rawMetadata?.AssetsTable == null) || config.Cognite.SkipMetadata; + var useCleanTimeseries = rawMetadata?.Database == null || rawMetadata?.TimeseriesTable == null; + var useCleanRelationships = rawMetadata?.Database == null || rawMetadata?.RelationshipsTable == null; + config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Assets = useCleanAssets, + Timeseries = useCleanTimeseries, + Relationships = useCleanRelationships + }, + RawMetadata = new RawMetadataTargetConfig + { + Database = rawMetadata?.Database, + AssetsTable = rawMetadata?.AssetsTable, + TimeseriesTable = rawMetadata?.TimeseriesTable, + RelationshipsTable = rawMetadata?.RelationshipsTable + } + }; + } + if (config.Cognite?.FlexibleDataModels != null) + { + log.LogWarning("cognite.flexible-data-models is deprecated. 
Use cognite.metadata-targets.flexible-data-models instead"); + + if (config.Cognite == null) config.Cognite = new CognitePusherConfig(); + if (config.Cognite.MetadataTargets == null) config.Cognite.MetadataTargets = new MetadataTargetsConfig(); + if (config.Cognite.MetadataTargets.FlexibleDataModels == null) { + config.Cognite.MetadataTargets.FlexibleDataModels = config.Cognite.FlexibleDataModels; + } + } return null; } diff --git a/Test/CDFMockHandler.cs b/Test/CDFMockHandler.cs index 3e4f1ae9..ce114c25 100644 --- a/Test/CDFMockHandler.cs +++ b/Test/CDFMockHandler.cs @@ -49,7 +49,7 @@ public class CDFMockHandler public Dictionary NumericDatapoints, List StringDatapoints)> Datapoints { get; } = new Dictionary NumericDatapoints, List StringDatapoints)>(); - public Dictionary AssetRaw { get; } = new Dictionary(); + public Dictionary AssetsRaw { get; } = new Dictionary(); public Dictionary TimeseriesRaw { get; } = new Dictionary(); public Dictionary Relationships { get; } = new Dictionary(); public Dictionary RelationshipsRaw { get; } = new Dictionary(); @@ -726,7 +726,7 @@ private HttpResponseMessage HandleGetRawAssets() { var data = new RawListWrapper { - items = AssetRaw.Select(kvp => new RawWrapper { columns = kvp.Value, key = kvp.Key, lastUpdatedTime = 0 }) + items = AssetsRaw.Select(kvp => new RawWrapper { columns = kvp.Value, key = kvp.Key, lastUpdatedTime = 0 }) }; var content = System.Text.Json.JsonSerializer.Serialize(data); return new HttpResponseMessage(HttpStatusCode.OK) @@ -764,7 +764,7 @@ private HttpResponseMessage HandleCreateRawAssets(string content) var toCreate = System.Text.Json.JsonSerializer.Deserialize>(content); foreach (var item in toCreate.items) { - AssetRaw[item.key] = item.columns; + AssetsRaw[item.key] = item.columns; } return new HttpResponseMessage(HttpStatusCode.OK) diff --git a/Test/Integration/NodeExtractionTests.cs b/Test/Integration/NodeExtractionTests.cs index 1c30ad35..448e986e 100644 --- a/Test/Integration/NodeExtractionTests.cs +++ b/Test/Integration/NodeExtractionTests.cs @@ -867,6 +867,14 @@ public async Task TestUpdateFields( var (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Assets = true, + Timeseries = true + } + }; var upd = tester.Config.Extraction.Update; upd.Objects.Name = assetName; upd.Objects.Description = assetDesc; @@ -907,8 +915,8 @@ public async Task TestUpdateFields( } [Theory] [InlineData(true, false)] - [InlineData(false, true)] - [InlineData(true, true)] + // [InlineData(false, true)] + // [InlineData(true, true)] public async Task TestUpdateFieldsRaw(bool assets, bool timeseries) { var (handler, pusher) = tester.GetCDFPusher(); @@ -926,11 +934,20 @@ public async Task TestUpdateFieldsRaw(bool assets, bool timeseries) tester.Config.Extraction.RootNode = CommonTestUtils.ToProtoNodeId(tester.Server.Ids.Custom.Root, tester.Client); - tester.Config.Cognite.RawMetadata = new RawMetadataConfig + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - Database = "metadata", - AssetsTable = "assets", - TimeseriesTable = "timeseries" + CleanMetadata = new CleanMetadataTargetConfig + { + Relationships = true, + Assets = false, + Timeseries = false + }, + RawMetadata = new RawMetadataTargetConfig + { + Database = "metadata", + AssetsTable = "assets", + TimeseriesTable = "timeseries" + } }; tester.Config.Extraction.DataTypes.AllowStringVariables 
= true; @@ -939,10 +956,10 @@ public async Task TestUpdateFieldsRaw(bool assets, bool timeseries) var runTask = extractor.RunExtractor(); - await TestUtils.WaitForCondition(() => handler.AssetRaw.Any() && handler.TimeseriesRaw.Any(), 5); + await TestUtils.WaitForCondition(() => handler.AssetsRaw.Any() && handler.TimeseriesRaw.Any(), 5); CommonTestUtils.VerifyStartingConditions( - handler.AssetRaw + handler.AssetsRaw .ToDictionary(kvp => kvp.Key, kvp => (AssetDummy)JsonSerializer.Deserialize(kvp.Value.ToString())), handler.TimeseriesRaw .ToDictionary(kvp => kvp.Key, kvp => (TimeseriesDummy) @@ -953,13 +970,13 @@ public async Task TestUpdateFieldsRaw(bool assets, bool timeseries) await extractor.Rebrowse(); CommonTestUtils.VerifyStartingConditions( - handler.AssetRaw + handler.AssetsRaw .ToDictionary(kvp => kvp.Key, kvp => (AssetDummy)JsonSerializer.Deserialize(kvp.Value.ToString())), handler.TimeseriesRaw .ToDictionary(kvp => kvp.Key, kvp => (TimeseriesDummy) JsonSerializer.Deserialize(kvp.Value.ToString())), upd, extractor, tester.Server.Ids.Custom, true); CommonTestUtils.VerifyModified( - handler.AssetRaw + handler.AssetsRaw .ToDictionary(kvp => kvp.Key, kvp => (AssetDummy)JsonSerializer.Deserialize(kvp.Value.ToString())), handler.TimeseriesRaw .ToDictionary(kvp => kvp.Key, kvp => (TimeseriesDummy) @@ -994,6 +1011,14 @@ public async Task TestUpdateNullPropertyValue() Metadata = true } }; + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Assets = true, + Timeseries = true + } + }; tester.Server.Server.MutateNode(tester.Server.Ids.Wrong.TooLargeProp, state => { diff --git a/Test/Unit/CDFPusherTest.cs b/Test/Unit/CDFPusherTest.cs index b855f6f8..018d18a6 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -277,6 +277,13 @@ public async Task TestCreateUpdateAssets() using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.RawMetadata = null; + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Assets = true + } + }; var rels = Enumerable.Empty(); var tss = Enumerable.Empty(); @@ -342,28 +349,43 @@ public async Task TestCreateRawAssets() AssetsTable = "assets", Database = "metadata" }; + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Relationships = true, + Assets = false, + Timeseries = true + }, + RawMetadata = new RawMetadataTargetConfig + { + Database = "metadata", + AssetsTable = "assets" + } + }; + var node = new UAObject(tester.Server.Ids.Base.Root, "BaseRoot", null, null, NodeId.Null, null); var rels = Enumerable.Empty(); var tss = Enumerable.Empty(); var update = new UpdateConfig(); // Fail to create handler.FailedRoutes.Add("/raw/dbs/metadata/tables/assets/rows"); - Assert.False((await pusher.PushNodes(new[] { node }, tss, rels, update, tester.Source.Token)).Objects); - Assert.Empty(handler.AssetRaw); + Assert.False((await pusher.PushNodes(new[] { node }, tss, rels, update, tester.Source.Token)).RawObjects); + Assert.Empty(handler.AssetsRaw); // Create one handler.FailedRoutes.Clear(); Assert.True((await pusher.PushNodes(new[] { node }, tss, rels, update, tester.Source.Token)).Objects); - Assert.Single(handler.AssetRaw); - Assert.Equal("BaseRoot", handler.AssetRaw.First().Value.GetProperty("name").GetString()); + 
Assert.Single(handler.AssetsRaw); + Assert.Equal("BaseRoot", handler.AssetsRaw.First().Value.GetProperty("name").GetString()); // Create another, do not overwrite the existing one, due to no update settings var node2 = new UAObject(tester.Server.Ids.Custom.Root, "CustomRoot", null, null, NodeId.Null, null); node.Attributes.Description = "description"; Assert.True((await pusher.PushNodes(new[] { node, node2 }, tss, rels, update, tester.Source.Token)).Objects); - Assert.Equal(2, handler.AssetRaw.Count); - Assert.Null(handler.AssetRaw.First().Value.GetProperty("description").GetString()); - Assert.Null(handler.AssetRaw.Last().Value.GetProperty("description").GetString()); + Assert.Equal(2, handler.AssetsRaw.Count); + Assert.Null(handler.AssetsRaw.First().Value.GetProperty("description").GetString()); + Assert.Null(handler.AssetsRaw.Last().Value.GetProperty("description").GetString()); Assert.True(CommonTestUtils.TestMetricValue("opcua_node_ensure_failures_cdf", 1)); } @@ -378,6 +400,21 @@ public async Task TestUpdateRawAssets() AssetsTable = "assets", Database = "metadata" }; + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Relationships = true, + Assets = false, + Timeseries = true + }, + RawMetadata = new RawMetadataTargetConfig + { + Database = "metadata", + AssetsTable = "assets", + TimeseriesTable = "timeseries" + } + }; var node = new UAObject(tester.Server.Ids.Base.Root, "BaseRoot", null, null, NodeId.Null, null); var rels = Enumerable.Empty(); var tss = Enumerable.Empty(); @@ -389,21 +426,21 @@ public async Task TestUpdateRawAssets() // Fail to upsert handler.FailedRoutes.Add("/raw/dbs/metadata/tables/assets/rows"); - Assert.False((await pusher.PushNodes(new[] { node }, tss, rels, update, tester.Source.Token)).Objects); - Assert.Empty(handler.AssetRaw); + Assert.False((await pusher.PushNodes(new[] { node }, tss, rels, update, tester.Source.Token)).RawObjects); + Assert.Empty(handler.AssetsRaw); // Create one handler.FailedRoutes.Clear(); - Assert.True((await pusher.PushNodes(new[] { node }, tss, rels, update, tester.Source.Token)).Objects); - Assert.Single(handler.AssetRaw); - Assert.Equal("BaseRoot", handler.AssetRaw.First().Value.GetProperty("name").GetString()); + Assert.True((await pusher.PushNodes(new[] { node }, tss, rels, update, tester.Source.Token)).RawObjects); + Assert.Single(handler.AssetsRaw); + Assert.Equal("BaseRoot", handler.AssetsRaw.First().Value.GetProperty("name").GetString()); // Create another, overwrite the existing one var node2 = new UAObject(tester.Server.Ids.Custom.Root, "CustomRoot", null, null, NodeId.Null, null); node.Attributes.Description = "description"; - Assert.True((await pusher.PushNodes(new[] { node, node2 }, tss, rels, update, tester.Source.Token)).Objects); - Assert.Equal(2, handler.AssetRaw.Count); - Assert.Single(handler.AssetRaw, asset => asset.Value.GetProperty("description").GetString() == "description"); + Assert.True((await pusher.PushNodes(new[] { node, node2 }, tss, rels, update, tester.Source.Token)).RawObjects); + Assert.Equal(2, handler.AssetsRaw.Count); + Assert.Single(handler.AssetsRaw, asset => asset.Value.GetProperty("description").GetString() == "description"); Assert.True(CommonTestUtils.TestMetricValue("opcua_node_ensure_failures_cdf", 1)); } @@ -413,6 +450,15 @@ public async Task TestCreateUpdateTimeseries() using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); 
tester.Config.Cognite.RawMetadata = null; + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Relationships = true, + Assets = false, + Timeseries = true + }, + }; var dt = new UADataType(DataTypeIds.Double); @@ -489,6 +535,21 @@ public async Task TestCreateRawTimeseries() TimeseriesTable = "timeseries", Database = "metadata" }; + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Relationships = true, + Assets = false, + Timeseries = false + }, + RawMetadata = new RawMetadataTargetConfig + { + Database = "metadata", + AssetsTable = "assets", + TimeseriesTable = "timeseries" + } + }; var dt = new UADataType(DataTypeIds.Double); @@ -500,12 +561,12 @@ public async Task TestCreateRawTimeseries() // Fail to create handler.FailedRoutes.Add("/raw/dbs/metadata/tables/timeseries/rows"); - Assert.False((await pusher.PushNodes(assets, new[] { node }, rels, update, tester.Source.Token)).Variables); + Assert.False((await pusher.PushNodes(assets, new[] { node }, rels, update, tester.Source.Token)).RawVariables); Assert.Empty(handler.TimeseriesRaw); // Create one handler.FailedRoutes.Clear(); - Assert.True((await pusher.PushNodes(assets, new[] { node }, rels, update, tester.Source.Token)).Variables); + Assert.True((await pusher.PushNodes(assets, new[] { node }, rels, update, tester.Source.Token)).RawVariables); Assert.Single(handler.TimeseriesRaw); Assert.Equal("Variable 1", handler.TimeseriesRaw.First().Value.GetProperty("name").GetString()); @@ -513,7 +574,7 @@ public async Task TestCreateRawTimeseries() var node2 = new UAVariable(tester.Server.Ids.Custom.MysteryVar, "MysteryVar", null, null, new NodeId("parent"), null); node2.FullAttributes.DataType = dt; node.Attributes.Description = "description"; - Assert.True((await pusher.PushNodes(assets, new[] { node, node2 }, rels, update, tester.Source.Token)).Variables); + Assert.True((await pusher.PushNodes(assets, new[] { node, node2 }, rels, update, tester.Source.Token)).RawVariables); Assert.Equal(2, handler.TimeseriesRaw.Count); Assert.Null(handler.TimeseriesRaw.First().Value.GetProperty("description").GetString()); Assert.Null(handler.TimeseriesRaw.Last().Value.GetProperty("description").GetString()); @@ -531,6 +592,21 @@ public async Task TestUpdateRawTimeseries() TimeseriesTable = "timeseries", Database = "metadata" }; + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Relationships = true, + Assets = false, + Timeseries = false + }, + RawMetadata = new RawMetadataTargetConfig + { + Database = "metadata", + AssetsTable = "assets", + TimeseriesTable = "timeseries" + } + }; var dt = new UADataType(DataTypeIds.Double); @@ -551,12 +627,12 @@ public async Task TestUpdateRawTimeseries() // Fail to upsert handler.FailedRoutes.Add("/raw/dbs/metadata/tables/timeseries/rows"); - Assert.False((await pusher.PushNodes(assets, new[] { node }, rels, update, tester.Source.Token)).Variables); + Assert.False((await pusher.PushNodes(assets, new[] { node }, rels, update, tester.Source.Token)).RawVariables); Assert.Empty(handler.TimeseriesRaw); // Create one handler.FailedRoutes.Clear(); - Assert.True((await pusher.PushNodes(assets, new[] { node }, rels, update, tester.Source.Token)).Variables); + Assert.True((await pusher.PushNodes(assets, new[] { node }, rels, update, tester.Source.Token)).RawVariables); Assert.Single(handler.TimeseriesRaw); 
Assert.Equal("Variable 1", handler.TimeseriesRaw.First().Value.GetProperty("name").GetString()); @@ -584,6 +660,16 @@ public async Task TestNodeCallback() using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.RawMetadata = null; + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Relationships = true, + Assets = true, + Timeseries = true + }, + }; + var dt = new UADataType(DataTypes.Double); @@ -676,6 +762,20 @@ public async Task TestRawNodeCallback() AssetsTable = "assets", Database = "metadata" }; + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Timeseries = false, + }, + RawMetadata = new RawMetadataTargetConfig + { + Database = "metadata", + AssetsTable = "assets", + TimeseriesTable = "timeseries", + RelationshipsTable = "relationships" + } + }; (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); @@ -711,9 +811,9 @@ await pusher.PushNodes(Enumerable.Empty(), Enumerable.Empty(); var tss = Enumerable.Empty(); @@ -921,10 +1029,13 @@ public async Task TestCreateRawRelationships() using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); - tester.Config.Cognite.RawMetadata = new RawMetadataConfig + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - RelationshipsTable = "relationships", - Database = "metadata" + RawMetadata = new RawMetadataTargetConfig + { + RelationshipsTable = "relationships", + Database = "metadata" + } }; tester.Config.Extraction.Relationships.Enabled = true; @@ -943,12 +1054,12 @@ public async Task TestCreateRawRelationships() }; await extractor.TypeManager.LoadTypeData(tester.Source.Token); handler.FailedRoutes.Add("/raw/dbs/metadata/tables/relationships/rows"); - Assert.False((await pusher.PushNodes(assets, tss, references, update, tester.Source.Token)).References); + Assert.False((await pusher.PushNodes(assets, tss, references, update, tester.Source.Token)).RawReferences); Assert.Empty(handler.RelationshipsRaw); // Push successful handler.FailedRoutes.Clear(); - Assert.True((await pusher.PushNodes(assets, tss, references, update, tester.Source.Token)).References); + Assert.True((await pusher.PushNodes(assets, tss, references, update, tester.Source.Token)).RawReferences); Assert.Equal(2, handler.RelationshipsRaw.Count); // Push again, with duplicates @@ -997,7 +1108,7 @@ private void NodeToRaw(UAExtractor extractor, BaseUANode node, ConverterType typ } else { - handler.AssetRaw[id] = val; + handler.AssetsRaw[id] = val; } } @@ -1117,11 +1228,14 @@ public async Task TestCDFAsSourceData() Database = "metadata", Enable = true }; - tester.Config.Cognite.RawMetadata = new RawMetadataConfig + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - AssetsTable = "assets", - TimeseriesTable = "timeseries", - Database = "metadata" + RawMetadata = new RawMetadataTargetConfig + { + AssetsTable = "assets", + TimeseriesTable = "timeseries", + Database = "metadata" + } }; tester.Config.Extraction.DataTypes.ExpandNodeIds = true; tester.Config.Extraction.DataTypes.AppendInternalValues = true; @@ -1140,7 +1254,7 @@ public async Task TestCDFAsSourceData() tester.Config.Cognite.RawNodeBuffer.BrowseOnEmpty = true; await extractor.RunExtractor(true); 
Assert.True(extractor.State.NodeStates.Any()); - Assert.True(handler.AssetRaw.Any()); + Assert.True(handler.AssetsRaw.Any()); Assert.True(handler.TimeseriesRaw.Any()); Assert.True(handler.Timeseries.Any()); Assert.Empty(handler.Assets); @@ -1152,7 +1266,7 @@ public async Task TestCDFAsSourceData() // Now there is something in CDF, read it back tester.Config.Cognite.RawNodeBuffer.BrowseOnEmpty = false; - string oldAssets = System.Text.Json.JsonSerializer.Serialize(handler.AssetRaw); + string oldAssets = System.Text.Json.JsonSerializer.Serialize(handler.AssetsRaw); string oldTimeseries = System.Text.Json.JsonSerializer.Serialize(handler.TimeseriesRaw); handler.Timeseries.Clear(); extractor.GetType().GetField("subscribed", BindingFlags.NonPublic | BindingFlags.Instance).SetValue(extractor, 0); @@ -1160,7 +1274,7 @@ public async Task TestCDFAsSourceData() await extractor.RunExtractor(true); Assert.True(extractor.State.NodeStates.Any()); - string newAssets = System.Text.Json.JsonSerializer.Serialize(handler.AssetRaw); + string newAssets = System.Text.Json.JsonSerializer.Serialize(handler.AssetsRaw); string newTimeseries = System.Text.Json.JsonSerializer.Serialize(handler.TimeseriesRaw); // Ensure data in raw is untouched. @@ -1192,11 +1306,14 @@ public async Task TestCDFAsSourceEvents() Database = "metadata", Enable = true }; - tester.Config.Cognite.RawMetadata = new RawMetadataConfig + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - AssetsTable = "assets", - TimeseriesTable = "timeseries", - Database = "metadata" + RawMetadata = new RawMetadataTargetConfig + { + AssetsTable = "assets", + TimeseriesTable = "timeseries", + Database = "metadata" + } }; tester.Config.Extraction.DataTypes.ExpandNodeIds = true; tester.Config.Extraction.DataTypes.AppendInternalValues = true; @@ -1214,7 +1331,7 @@ public async Task TestCDFAsSourceEvents() tester.Config.Cognite.RawNodeBuffer.BrowseOnEmpty = true; await extractor.RunExtractor(true); Assert.True(extractor.State.NodeStates.Any()); - Assert.True(handler.AssetRaw.Any()); + Assert.True(handler.AssetsRaw.Any()); Assert.True(handler.TimeseriesRaw.Any()); Assert.True(handler.Timeseries.Any()); Assert.Empty(handler.Assets); @@ -1226,7 +1343,7 @@ public async Task TestCDFAsSourceEvents() // Now there is something in CDF, read it back tester.Config.Cognite.RawNodeBuffer.BrowseOnEmpty = false; - string oldAssets = System.Text.Json.JsonSerializer.Serialize(handler.AssetRaw); + string oldAssets = System.Text.Json.JsonSerializer.Serialize(handler.AssetsRaw); string oldTimeseries = System.Text.Json.JsonSerializer.Serialize(handler.TimeseriesRaw); handler.Timeseries.Clear(); extractor.GetType().GetField("subscribed", BindingFlags.NonPublic | BindingFlags.Instance).SetValue(extractor, 0); @@ -1234,7 +1351,7 @@ public async Task TestCDFAsSourceEvents() await extractor.RunExtractor(true); Assert.True(extractor.State.NodeStates.Any()); - string newAssets = System.Text.Json.JsonSerializer.Serialize(handler.AssetRaw); + string newAssets = System.Text.Json.JsonSerializer.Serialize(handler.AssetsRaw); string newTimeseries = System.Text.Json.JsonSerializer.Serialize(handler.TimeseriesRaw); // Ensure data in raw is untouched. 
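Across these test hunks the flat RawMetadataConfig block is being replaced by the nested MetadataTargetsConfig introduced in this patch series. A minimal sketch of the combined shape, with the database and table names taken from the hunks above (the individual clean/raw flags are illustrative and vary from test to test):

    tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig
    {
        // Push assets, timeseries and relationships to clean CDF resources.
        CleanMetadata = new CleanMetadataTargetConfig
        {
            Assets = true,
            Timeseries = true,
            Relationships = true
        },
        // Mirror the same metadata into CDF Raw tables.
        RawMetadata = new RawMetadataTargetConfig
        {
            Database = "metadata",
            AssetsTable = "assets",
            TimeseriesTable = "timeseries",
            RelationshipsTable = "relationships"
        }
    };
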
@@ -1264,11 +1381,19 @@ public async Task TestCDFNodeSetBackground() Database = "metadata", Enable = true }; - tester.Config.Cognite.RawMetadata = new RawMetadataConfig + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - AssetsTable = "assets", - TimeseriesTable = "timeseries", - Database = "metadata" + CleanMetadata = new CleanMetadataTargetConfig + { + Relationships = true, + Assets = false, + }, + RawMetadata = new RawMetadataTargetConfig + { + Database = "metadata", + TimeseriesTable = "timeseries", + AssetsTable = "assets" + } }; tester.Config.Extraction.DataTypes.ExpandNodeIds = true; tester.Config.Extraction.DataTypes.AppendInternalValues = true; @@ -1284,7 +1409,7 @@ public async Task TestCDFNodeSetBackground() tester.Config.Cognite.RawNodeBuffer.BrowseOnEmpty = true; await extractor.RunExtractor(true); Assert.True(extractor.State.NodeStates.Any()); - Assert.True(handler.AssetRaw.Any()); + Assert.True(handler.AssetsRaw.Any()); Assert.True(handler.TimeseriesRaw.Any()); Assert.True(handler.Timeseries.Any()); Assert.Empty(handler.Assets); diff --git a/Test/Unit/DeleteTest.cs b/Test/Unit/DeleteTest.cs index 4b5498ba..392d58d3 100644 --- a/Test/Unit/DeleteTest.cs +++ b/Test/Unit/DeleteTest.cs @@ -391,6 +391,14 @@ public async Task TestCDFDelete() var addedExtId = tester.Client.GetUniqueId(addedId); var addedVarExtId = tester.Client.GetUniqueId(addedVarId); + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + CleanMetadata = new CleanMetadataTargetConfig + { + Assets = true, + // Timeseries = true + } + }; // Run the extractor and verify that we got the node. await extractor.RunExtractor(true); Assert.True(handler.Assets.ContainsKey(addedExtId)); @@ -427,12 +435,19 @@ public async Task TestCDFDeleteRaw() tester.Config.Extraction.Relationships.Enabled = true; tester.Config.Extraction.Relationships.Hierarchical = true; tester.Config.Cognite.DeleteRelationships = true; - tester.Config.Cognite.RawMetadata = new RawMetadataConfig + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - AssetsTable = "assets", - TimeseriesTable = "timeseries", - Database = "metadata", - RelationshipsTable = "relationships" + CleanMetadata = new CleanMetadataTargetConfig + { + Timeseries = true, + }, + RawMetadata = new RawMetadataTargetConfig + { + Database = "metadata", + AssetsTable = "assets", + TimeseriesTable = "timeseries", + RelationshipsTable = "relationships" + } }; using var stateStore = new MockStateStore(); @@ -445,11 +460,11 @@ public async Task TestCDFDeleteRaw() // Run the extractor and verify that we got the node. await extractor.RunExtractor(true); - Assert.True(handler.AssetRaw.ContainsKey(addedExtId)); + Assert.True(handler.AssetsRaw.ContainsKey(addedExtId)); Assert.True(handler.TimeseriesRaw.ContainsKey(addedVarExtId)); handler.Timeseries.Values.ToList().ForEach(v => _output.WriteLine(v.ToString())); Assert.True(handler.Timeseries.ContainsKey(addedVarExtId)); - Assert.False(handler.AssetRaw[addedExtId].TryGetProperty("deleted", out _)); + Assert.False(handler.AssetsRaw[addedExtId].TryGetProperty("deleted", out _)); Assert.False(handler.TimeseriesRaw[addedVarExtId].TryGetProperty("deleted", out _)); Assert.False(handler.Timeseries[addedVarExtId].metadata?.ContainsKey("deleted") ?? false); // Need to build the reference externalId late, since it depends on the reference type manager being populated. 
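The mock handler stores each Raw row's columns as a JsonElement, so the soft-delete checks in the delete tests read the marker straight off the JSON payload rather than expecting the row to disappear. A small sketch of that pattern, assuming a row value of type JsonElement as used by the handler assertions above (the helper name is hypothetical):

    // Sketch only: reading an optional boolean "deleted" column from a Raw row payload.
    static bool IsSoftDeleted(System.Text.Json.JsonElement row) =>
        row.TryGetProperty("deleted", out var flag)
        && flag.ValueKind == System.Text.Json.JsonValueKind.True;
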
@@ -465,7 +480,7 @@ public async Task TestCDFDeleteRaw() tester.Server.Server.RemoveNode(addedId); tester.Server.Server.RemoveNode(addedVarId); await extractor.Rebrowse(); - Assert.True(handler.AssetRaw[addedExtId].GetProperty("deleted").GetBoolean()); + Assert.True(handler.AssetsRaw[addedExtId].GetProperty("deleted").GetBoolean()); Assert.True(handler.TimeseriesRaw[addedVarExtId].GetProperty("deleted").GetBoolean()); Assert.True(handler.RelationshipsRaw[refExtId].deleted); diff --git a/Test/Unit/FDMTests.cs b/Test/Unit/FDMTests.cs index 5c3c8a13..5b86c819 100644 --- a/Test/Unit/FDMTests.cs +++ b/Test/Unit/FDMTests.cs @@ -2,9 +2,7 @@ using Microsoft.Extensions.Logging; using Server; using System; -using System.Collections.Generic; using System.Linq; -using System.Text; using System.Threading.Tasks; using Test.Utils; using Xunit; @@ -25,11 +23,14 @@ public FDMTests(ITestOutputHelper output, FDMTestFixture tester) this.tester = tester ?? throw new ArgumentNullException(nameof(tester)); tester.Init(output); tester.ResetConfig(); - tester.Config.Cognite.FlexibleDataModels = new FdmDestinationConfig + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - Enabled = true, - Space = "test", - ExcludeNonReferenced = true, + FlexibleDataModels = new FdmDestinationConfig + { + Enabled = true, + Space = "test", + ExcludeNonReferenced = true, + } }; tester.Config.Extraction.RootNode = new ProtoNodeId { @@ -45,8 +46,8 @@ public FDMTests(ITestOutputHelper output, FDMTestFixture tester) [Fact] public async Task TestMapCustomTypes() { - tester.Config.Cognite.FlexibleDataModels.ExcludeNonReferenced = true; - tester.Config.Cognite.FlexibleDataModels.TypesToMap = TypesToMap.Custom; + tester.Config.Cognite.MetadataTargets.FlexibleDataModels.ExcludeNonReferenced = true; + tester.Config.Cognite.MetadataTargets.FlexibleDataModels.TypesToMap = TypesToMap.Custom; var (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); @@ -71,8 +72,8 @@ public async Task TestMapCustomTypes() [Fact] public async Task TestMapReferencedTypes() { - tester.Config.Cognite.FlexibleDataModels.ExcludeNonReferenced = true; - tester.Config.Cognite.FlexibleDataModels.TypesToMap = TypesToMap.Referenced; + tester.Config.Cognite.MetadataTargets.FlexibleDataModels.ExcludeNonReferenced = true; + tester.Config.Cognite.MetadataTargets.FlexibleDataModels.TypesToMap = TypesToMap.Referenced; var (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); @@ -93,8 +94,8 @@ public async Task TestMapReferencedTypes() [Fact] public async Task TestMapReferencedTypesNoTrim() { - tester.Config.Cognite.FlexibleDataModels.ExcludeNonReferenced = false; - tester.Config.Cognite.FlexibleDataModels.TypesToMap = TypesToMap.Referenced; + tester.Config.Cognite.MetadataTargets.FlexibleDataModels.ExcludeNonReferenced = false; + tester.Config.Cognite.MetadataTargets.FlexibleDataModels.TypesToMap = TypesToMap.Referenced; var (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); @@ -115,8 +116,8 @@ public async Task TestMapReferencedTypesNoTrim() [Fact] public async Task TestMapEverything() { - tester.Config.Cognite.FlexibleDataModels.ExcludeNonReferenced = false; - tester.Config.Cognite.FlexibleDataModels.TypesToMap = TypesToMap.All; + tester.Config.Cognite.MetadataTargets.FlexibleDataModels.ExcludeNonReferenced = false; + tester.Config.Cognite.MetadataTargets.FlexibleDataModels.TypesToMap = 
TypesToMap.All; var (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); diff --git a/Test/Unit/MQTTBridgeTests.cs b/Test/Unit/MQTTBridgeTests.cs index d910753b..e591cd5b 100644 --- a/Test/Unit/MQTTBridgeTests.cs +++ b/Test/Unit/MQTTBridgeTests.cs @@ -772,14 +772,14 @@ public async Task TestMqttRaw() }; await tester.PublishRawAssets(roundOne); - Assert.Equal(2, tester.Handler.AssetRaw.Count); - Assert.True(tester.Handler.AssetRaw.ContainsKey("test-asset-1")); - Assert.True(tester.Handler.AssetRaw.ContainsKey("test-asset-2")); + Assert.Equal(2, tester.Handler.AssetsRaw.Count); + Assert.True(tester.Handler.AssetsRaw.ContainsKey("test-asset-1")); + Assert.True(tester.Handler.AssetsRaw.ContainsKey("test-asset-2")); await tester.PublishRawAssets(roundTwo); - Assert.Equal(3, tester.Handler.AssetRaw.Count); - Assert.Contains(tester.Handler.AssetRaw, kvp => kvp.Value.GetProperty("name").GetString() == "test-asset-3"); - Assert.True(tester.Handler.AssetRaw.ContainsKey("test-asset-1")); - var asset1 = tester.Handler.AssetRaw["test-asset-1"]; + Assert.Equal(3, tester.Handler.AssetsRaw.Count); + Assert.Contains(tester.Handler.AssetsRaw, kvp => kvp.Value.GetProperty("name").GetString() == "test-asset-3"); + Assert.True(tester.Handler.AssetsRaw.ContainsKey("test-asset-1")); + var asset1 = tester.Handler.AssetsRaw["test-asset-1"]; Assert.Equal(@"{""test-prop"":""test-value""}", asset1.GetProperty("metadata").ToString()); } diff --git a/Test/Unit/MQTTPusherTest.cs b/Test/Unit/MQTTPusherTest.cs index 5e61ae26..e95aa97b 100644 --- a/Test/Unit/MQTTPusherTest.cs +++ b/Test/Unit/MQTTPusherTest.cs @@ -318,8 +318,8 @@ public async Task TestCreateUpdateRawAssets() var waitTask = bridge.WaitForNextMessage(); Assert.True((await pusher.PushNodes(new[] { node }, tss, rels, update, tester.Source.Token)).Objects); await waitTask; - Assert.Single(handler.AssetRaw); - Assert.Equal("BaseRoot", handler.AssetRaw.First().Value.GetProperty("name").GetString()); + Assert.Single(handler.AssetsRaw); + Assert.Equal("BaseRoot", handler.AssetsRaw.First().Value.GetProperty("name").GetString()); // Create another, do not overwrite the existing one, due to no update settings var node2 = new UAObject(tester.Server.Ids.Custom.Root, "CustomRoot", null, null, NodeId.Null, null); @@ -327,17 +327,17 @@ public async Task TestCreateUpdateRawAssets() waitTask = bridge.WaitForNextMessage(); Assert.True((await pusher.PushNodes(new[] { node, node2 }, tss, rels, update, tester.Source.Token)).Objects); await waitTask; - Assert.Equal(2, handler.AssetRaw.Count); - Assert.Null(handler.AssetRaw.First().Value.GetProperty("description").GetString()); - Assert.Null(handler.AssetRaw.Last().Value.GetProperty("description").GetString()); + Assert.Equal(2, handler.AssetsRaw.Count); + Assert.Null(handler.AssetsRaw.First().Value.GetProperty("description").GetString()); + Assert.Null(handler.AssetsRaw.Last().Value.GetProperty("description").GetString()); // Try to create again, skip both waitTask = bridge.WaitForNextMessage(1); Assert.True((await pusher.PushNodes(new[] { node, node2 }, tss, rels, update, tester.Source.Token)).Objects); await Assert.ThrowsAsync(() => waitTask); - Assert.Equal(2, handler.AssetRaw.Count); - Assert.Null(handler.AssetRaw.First().Value.GetProperty("description").GetString()); - Assert.Null(handler.AssetRaw.Last().Value.GetProperty("description").GetString()); + Assert.Equal(2, handler.AssetsRaw.Count); + 
Assert.Null(handler.AssetsRaw.First().Value.GetProperty("description").GetString()); + Assert.Null(handler.AssetsRaw.Last().Value.GetProperty("description").GetString()); // Update due to update settings update.Objects.Description = true; @@ -345,9 +345,9 @@ public async Task TestCreateUpdateRawAssets() waitTask = bridge.WaitForNextMessage(); Assert.True((await pusher.PushNodes(new[] { node, node2 }, tss, rels, update, tester.Source.Token)).Objects); await waitTask; - Assert.Equal(2, handler.AssetRaw.Count); - Assert.Equal("description", handler.AssetRaw.First().Value.GetProperty("description").GetString()); - Assert.Equal("description", handler.AssetRaw.Last().Value.GetProperty("description").GetString()); + Assert.Equal(2, handler.AssetsRaw.Count); + Assert.Equal("description", handler.AssetsRaw.First().Value.GetProperty("description").GetString()); + Assert.Equal("description", handler.AssetsRaw.Last().Value.GetProperty("description").GetString()); Assert.True(CommonTestUtils.TestMetricValue("opcua_node_ensure_failures_mqtt", 0)); } From 70fe5448cfaf8b635dff2aa18938af6b64328e9d Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Sun, 9 Jul 2023 11:51:31 +0200 Subject: [PATCH 11/26] docs: include methods documentations --- Extractor/Pushers/CDFPusher.cs | 351 +++++++++++------- Extractor/Pushers/Writers/AssetsWriter.cs | 56 ++- .../Pushers/Writers/Interfaces/IRawWriter.cs | 50 ++- .../Interfaces/IRelationshipsWriter.cs | 23 ++ .../Writers/Interfaces/ITimeseriesWriter.cs | 27 ++ .../Writers/MinimalTimeseriesWriter.cs | 17 + Extractor/Pushers/Writers/RawWriter.cs | 113 +++++- .../Pushers/Writers/RelationshipsWriter.cs | 35 +- Extractor/Pushers/Writers/TimeseriesWriter.cs | 73 +++- 9 files changed, 565 insertions(+), 180 deletions(-) diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index 7029cea3..0a5f58cc 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -291,7 +291,9 @@ public CDFPusher( /// /// List of objects to be synchronized /// List of variables to be synchronized + /// List of references to be synchronized /// Configuration of what fields, if any, should be updated. + /// Cancellation token /// True if no operation failed unexpectedly public async Task PushNodes(IEnumerable objects, IEnumerable variables, IEnumerable references, UpdateConfig update, CancellationToken token) @@ -373,6 +375,15 @@ public async Task PushNodes(IEnumerable objects, return result; } + /// + /// Synchronized all objects, variables and references with FDM + /// + /// List of objects to be synchronized + /// List of variables to synchronize + /// List of references to synchronize + /// Push result + /// Cancellation token + /// Task private async Task PushFdm(IEnumerable objects, IEnumerable variables, IEnumerable references, PushResult result, CancellationToken token) { bool pushResult = true; @@ -389,134 +400,6 @@ private async Task PushFdm(IEnumerable objects, IEnumerable MapAssets(IEnumerable objects) - { - return config.SkipMetadata ? - new ConcurrentDictionary() : - new ConcurrentDictionary( - objects - .Where(node => node.Source != NodeSource.CDF) - .ToDictionary(obj => Extractor.GetUniqueId(obj.Id)!) - ); - } - - private ConcurrentDictionary MapTimeseries(IEnumerable variables) - { - return new ConcurrentDictionary( - variables.ToDictionary(ts => ts.GetUniqueId(Extractor)!) 
- ); - } - - private async Task PushAssets(IEnumerable objects, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) - { - if (!objects.Any()) return; - - var assetsMap = MapAssets(objects); - if (CleanMetadataTargetConfig?.Assets ?? false) - { - await PushCleanAssets(assetsMap, update, report, result, token); - } - if (RawMetadataTargetConfig?.Database != null && RawMetadataTargetConfig?.AssetsTable != null) - { - await PushRawAssets(assetsMap, update, report, result, token); - } - } - - private async Task PushTimeseries(IEnumerable variables, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) - { - if (!variables.Any()) return; - - var timeseriesMap = MapTimeseries(variables); - await PushCleanTimeseries(timeseriesMap, update, report, result, token); - if ((RawMetadataTargetConfig?.Database != null) && (RawMetadataTargetConfig?.TimeseriesTable != null)) - { - await PushRawTimeseries(timeseriesMap, update, report, result, token); - } - } - - private async Task PushReferences(IEnumerable references, BrowseReport report, PushResult result, CancellationToken token) - { - if (!references.Any()) return; - - var relationships = references - .Select(reference => reference.ToRelationship(config.DataSet?.Id, Extractor)) - .DistinctBy(rel => rel.ExternalId); - - if (CleanMetadataTargetConfig?.Relationships ?? false) - { - await PushCleanReferences(relationships, report, result, token); - } - - if (RawMetadataTargetConfig?.Database != null && RawMetadataTargetConfig?.RelationshipsTable != null) - { - await PushRawReferences(relationships, report, result, token); - } - } - - private async Task PushCleanAssets(IDictionary assetsMap, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) - { - try - { - var _result = await cdfWriter.Assets.PushNodes(Extractor, assetsMap, nodeToAssetIds, update, token); - report.AssetsCreated += _result.Created; - report.AssetsUpdated += _result.Updated; - } - catch - { - result.Objects = false; - } - } - - private async Task PushCleanTimeseries(IDictionary timeseriesMap, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) - { - try - { - var createMinimal = !(CleanMetadataTargetConfig?.Timeseries ?? false); - var writer = createMinimal ? 
cdfWriter.MinimalTimeseries : cdfWriter.Timeseries; - var _result = await writer.PushVariables(Extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, update, token); - if (createMinimal) - { - report.MinimalTimeSeriesCreated += _result.Created; - } - else - { - report.TimeSeriesCreated += _result.Created; - } - report.TimeSeriesUpdated += _result.Updated; - } - catch - { - result.Variables = false; - } - } - - private async Task PushCleanReferences(IEnumerable relationships, BrowseReport report, PushResult result, CancellationToken token) - { - try - { - var _result = await cdfWriter.Relationships.PushReferences(relationships, token); - report.RelationshipsCreated += _result.Created; - } - catch (Exception e) - { - log.LogError(e, "Failed to ensure relationships"); - result.References = false; - } - } - - private async Task PushRawReferences(IEnumerable relationships, BrowseReport report, PushResult result, CancellationToken token) - { - try - { - var _result = await cdfWriter.Raw.PushReferences(RawMetadataTargetConfig!.Database!, RawMetadataTargetConfig!.RelationshipsTable!, relationships, token); - report.RawRelationshipsCreated += _result.Created; - } catch (Exception e) - { - log.LogError(e, "Failed to ensure raw relationships"); - result.RawReferences = false; - } - } - /// /// Reset the pusher, preparing it to be restarted /// @@ -730,6 +613,69 @@ public async Task ExecuteDeletes(DeletedNodes deletes, CancellationToken t #endregion #region assets + /// + /// Maps objects to their keys while filtering + /// + /// List of objects to be mapped + /// A dictionary of mapping + private ConcurrentDictionary MapAssets(IEnumerable objects) + { + return config.SkipMetadata ? + new ConcurrentDictionary() : + new ConcurrentDictionary( + objects + .Where(node => node.Source != NodeSource.CDF) + .ToDictionary(obj => Extractor.GetUniqueId(obj.Id)!) + ); + } + + /// + /// Synchronize all objects to CDF + /// + /// List of objects to be synchronized + /// Update configuration + /// Browse report + /// Push result + /// Cancellation token + /// Task + private async Task PushAssets(IEnumerable objects, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) + { + if (!objects.Any()) return; + + var assetsMap = MapAssets(objects); + if (CleanMetadataTargetConfig?.Assets ?? false) + { + await PushCleanAssets(assetsMap, update, report, result, token); + } + if (RawMetadataTargetConfig?.Database != null && RawMetadataTargetConfig?.AssetsTable != null) + { + await PushRawAssets(assetsMap, update, report, result, token); + } + } + + /// + /// Synchronize all objects to CDF assets + /// + /// Synchronizes all objects maps to CDF assets + /// Update configuration + /// Browse report + /// Push result + /// Cancellation token + /// Task + private async Task PushCleanAssets(IDictionary assetsMap, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) + { + try + { + var _result = await cdfWriter.Assets.PushNodes(Extractor, assetsMap, nodeToAssetIds, update, token); + report.AssetsCreated += _result.Created; + report.AssetsUpdated += _result.Updated; + } + catch + { + result.Objects = false; + } + } + /// /// Master method for pushing assets to CDF raw. /// @@ -811,15 +757,83 @@ await MarkRawRowsAsDeleted( result.ThrowOnFatal(); } } - #endregion #region timeseries /// - /// Master method for pushing timeseries to CDF raw or clean. 
+ /// Maps variables to their keys /// - /// Timeseries to push - /// Configuration for which fields, if any, to update in CDF + /// List of variables to be mapped + /// A dictionary of mapping + private ConcurrentDictionary MapTimeseries(IEnumerable variables) + { + return new ConcurrentDictionary( + variables.ToDictionary(ts => ts.GetUniqueId(Extractor)!) + ); + } + + /// + /// Synchronize all variables to CDF + /// + /// List of variables to be synchronized + /// Update configuration + /// Browse report + /// Push result + /// Cancellation token + /// Task + private async Task PushTimeseries(IEnumerable variables, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) + { + if (!variables.Any()) return; + + var timeseriesMap = MapTimeseries(variables); + await PushCleanTimeseries(timeseriesMap, update, report, result, token); + if ((RawMetadataTargetConfig?.Database != null) && (RawMetadataTargetConfig?.TimeseriesTable != null)) + { + await PushRawTimeseries(timeseriesMap, update, report, result, token); + } + } + + /// + /// Synchronize all variables to CDF timeseries + /// + /// Synchronizes all variable maps to CDF timeseries + /// Update configuration + /// Browse report + /// Push result + /// Cancellation token + /// Task + private async Task PushCleanTimeseries(IDictionary timeseriesMap, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) + { + try + { + var createMinimal = !(CleanMetadataTargetConfig?.Timeseries ?? false); + var writer = createMinimal ? cdfWriter.MinimalTimeseries : cdfWriter.Timeseries; + var _result = await writer.PushVariables(Extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, update, token); + if (createMinimal) + { + report.MinimalTimeSeriesCreated += _result.Created; + } + else + { + report.TimeSeriesCreated += _result.Created; + } + report.TimeSeriesUpdated += _result.Updated; + } + catch + { + result.Variables = false; + } + } + + /// + /// Synchronize all variables to CDF raw + /// + /// Synchronizes all variables maps to CDF raw + /// Update configuration + /// Browse report + /// Push result + /// Cancellation token + /// Task private async Task PushRawTimeseries(ConcurrentDictionary tsIds, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) { try @@ -959,6 +973,79 @@ await destination.InsertRawRowsAsync( #endregion #region references + /// + /// Synchronize all references to CDF + /// + /// List of references to be synchronized + /// Update configuration + /// Browse report + /// Push result + /// Cancellation token + /// Task + private async Task PushReferences(IEnumerable references, BrowseReport report, PushResult result, CancellationToken token) + { + if (!references.Any()) return; + + var relationships = references + .Select(reference => reference.ToRelationship(config.DataSet?.Id, Extractor)) + .DistinctBy(rel => rel.ExternalId); + + if (CleanMetadataTargetConfig?.Relationships ?? 
false) + { + await PushCleanReferences(relationships, report, result, token); + } + + if (RawMetadataTargetConfig?.Database != null && RawMetadataTargetConfig?.RelationshipsTable != null) + { + await PushRawReferences(relationships, report, result, token); + } + } + + /// + /// Synchronize all references to CDF relationship + /// + /// Synchronizes all references maps to CDF relationships + /// Update configuration + /// Browse report + /// Push result + /// Cancellation token + /// Task + private async Task PushCleanReferences(IEnumerable relationships, BrowseReport report, PushResult result, CancellationToken token) + { + try + { + var _result = await cdfWriter.Relationships.PushReferences(relationships, token); + report.RelationshipsCreated += _result.Created; + } + catch (Exception e) + { + log.LogError(e, "Failed to ensure relationships"); + result.References = false; + } + } + + /// + /// Synchronize all references to CDF + /// + /// Synchronizes all references maps to CDF assets + /// Update configuration + /// Browse report + /// Push result + /// Cancellation token + /// Task + private async Task PushRawReferences(IEnumerable relationships, BrowseReport report, PushResult result, CancellationToken token) + { + try + { + var _result = await cdfWriter.Raw.PushReferences(RawMetadataTargetConfig!.Database!, RawMetadataTargetConfig!.RelationshipsTable!, relationships, token); + report.RawRelationshipsCreated += _result.Created; + } catch (Exception e) + { + log.LogError(e, "Failed to ensure raw relationships"); + result.RawReferences = false; + } + } + private async Task MarkReferencesAsDeleted( IEnumerable externalIds, CancellationToken token diff --git a/Extractor/Pushers/Writers/AssetsWriter.cs b/Extractor/Pushers/Writers/AssetsWriter.cs index b742f593..a86aa5eb 100644 --- a/Extractor/Pushers/Writers/AssetsWriter.cs +++ b/Extractor/Pushers/Writers/AssetsWriter.cs @@ -1,3 +1,20 @@ +/* Cognite Extractor for OPC-UA +Copyright (C) 2021 Cognite AS + +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
*/ + using System.Collections.Generic; using System.Linq; using System.Threading; @@ -28,7 +45,17 @@ public AssetsWriter(ILogger logger, CogniteDestination destination this.destination = destination; } - public async Task PushNodes(UAExtractor extractor, IDictionary nodes, IDictionary nodeToAssetIds, TypeUpdateConfig update, CancellationToken token) + /// + /// Synchronizes all BaseUANode to CDF assets + /// + /// UAExtractor instance + /// Dictionary of mapping of variables to keys + /// Node to assets to ids + /// Type update configuration + /// Cancellation token + /// Operation result + public async Task PushNodes(UAExtractor extractor, IDictionary nodes, + IDictionary nodeToAssetIds, TypeUpdateConfig update, CancellationToken token) { var result = new Result { Created = 0, Updated = 0 }; var assets = await CreateAssets(extractor, nodes, nodeToAssetIds, result, token); @@ -39,8 +66,18 @@ public async Task PushNodes(UAExtractor extractor, IDictionary> CreateAssets(UAExtractor extractor, IDictionary assetMap, IDictionary nodeToAssetIds, Result result, CancellationToken token) + + /// + /// Create all BaseUANode to CDF assets + /// + /// UAExtractor instance + /// Dictionary of mapping of variables to keys + /// Node to assets to ids + /// Operation result + /// Cancellation token + /// Future list of assets + private async Task> CreateAssets(UAExtractor extractor, + IDictionary assetMap, IDictionary nodeToAssetIds, Result result, CancellationToken token) { var assets = new List(); var maxSize = config.Cognite?.CdfChunking.Assets ?? 1000; @@ -75,7 +112,18 @@ private async Task> CreateAssets(UAExtractor extractor, IDict return assets; } - private async Task UpdateAssets(UAExtractor extractor, IDictionary assetMap, IEnumerable assets, TypeUpdateConfig update, Result result, CancellationToken token) + /// + /// Update all BaseUANode to CDF assets + /// + /// UAExtractor instance + /// Dictionary of mapping of variables to keys + /// List of assets + /// Type update configuration + /// Operation result + /// Cancellation token + /// Future list of assets + private async Task UpdateAssets(UAExtractor extractor, IDictionary assetMap, + IEnumerable assets, TypeUpdateConfig update, Result result, CancellationToken token) { var updates = new List(); var existing = assets.ToDictionary(asset => asset.ExternalId); diff --git a/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs index 71730d8b..07c7bf70 100644 --- a/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/IRawWriter.cs @@ -1,3 +1,20 @@ +/* Cognite Extractor for OPC-UA +Copyright (C) 2021 Cognite AS + +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
*/ + using System.Collections.Generic; using System.Text.Json; using System.Threading; @@ -14,13 +31,32 @@ public interface IRawWriter static JsonSerializerOptions options => new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; - Task>>> GetRawRows( + /// + /// Get all rows from CDF + /// + /// Name of metadata database in CDF + /// Name of metadata table in CDF + /// Columns + /// Cancellation token + /// A dictionary of JsonElement + Task>>> GetRows( string dbName, string tableName, IEnumerable? columns, CancellationToken token ); + /// + /// Synchronizes all BaseUANode to CDF raw + /// + /// UAExtractor instance + /// Name of metadata database in CDF + /// Name of metadata table in CDF + /// Dictionary map of BaseUANode of their keys + /// Converter + /// Indicates if it is an update operation + /// Cancellation token + /// Operation result Task PushNodes( UAExtractor extractor, string database, @@ -32,6 +68,18 @@ CancellationToken token ) where T : BaseUANode; + /// + /// Updates all BaseUANode to CDF raw + /// + /// UAExtractor instance + /// Name of metadata database in CDF + /// Name of metadata table in CDF + /// Dictionary map of BaseUANode of their keys + /// Converter + /// Operation result + /// Indicates if it is an update operation + /// Cancellation token + /// Task Task PushReferences( string database, string table, diff --git a/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs b/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs index bebeacc6..044b6d65 100644 --- a/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/IRelationshipsWriter.cs @@ -1,3 +1,20 @@ +/* Cognite Extractor for OPC-UA +Copyright (C) 2021 Cognite AS + +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ + using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; @@ -8,6 +25,12 @@ namespace Cognite.OpcUa.Pushers.Writers.Interfaces { public interface IRelationshipsWriter { + /// + /// Push all refernces to CDF relationship + /// + /// List of sanitized references + /// Cancellation token + /// A result reporting items created/updated Task PushReferences(IEnumerable relationships, CancellationToken token); } } diff --git a/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs index ecb71c29..5514ba53 100644 --- a/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/ITimeseriesWriter.cs @@ -1,3 +1,20 @@ +/* Cognite Extractor for OPC-UA +Copyright (C) 2021 Cognite AS + +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. 
+ +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ + using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; @@ -10,6 +27,16 @@ namespace Cognite.OpcUa.Pushers.Writers.Interfaces { public interface ITimeseriesWriter { + /// + /// Synchronizes all BaseUANode to CDF Timeseries + /// + /// UAExtractor instance + /// Dictionary of mapping of variables to keys + /// Node to assets to ids + /// Mismatched timeseries + /// Type update configuration + /// Cancellation token + /// Operation result Task PushVariables( UAExtractor extractor, IDictionary timeseriesMap, diff --git a/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs b/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs index bc46efbe..121b0dba 100644 --- a/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs @@ -1,3 +1,20 @@ +/* Cognite Extractor for OPC-UA +Copyright (C) 2021 Cognite AS + +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ + using Cognite.Extractor.Utils; using Cognite.OpcUa.Config; using Cognite.OpcUa.Pushers.Writers.Interfaces; diff --git a/Extractor/Pushers/Writers/RawWriter.cs b/Extractor/Pushers/Writers/RawWriter.cs index ef72ce78..28d7d769 100644 --- a/Extractor/Pushers/Writers/RawWriter.cs +++ b/Extractor/Pushers/Writers/RawWriter.cs @@ -1,3 +1,20 @@ +/* Cognite Extractor for OPC-UA +Copyright (C) 2021 Cognite AS + +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
*/ + using System; using System.Collections.Generic; using System.Linq; @@ -28,7 +45,15 @@ public RawWriter(ILogger log, CogniteDestination destination, FullCon this.destination = destination; } - public async Task>>> GetRawRows( + /// + /// Get all rows from CDF + /// + /// Name of metadata database in CDF + /// Name of metadata table in CDF + /// Columns + /// Cancellation token + /// A dictionary of JsonElement + public async Task>>> GetRows( string dbName, string tableName, IEnumerable? columns, @@ -46,12 +71,7 @@ CancellationToken token >( dbName, tableName, - new RawRowQuery - { - Cursor = cursor, - Limit = 10_000, - Columns = columns - }, + new RawRowQuery { Cursor = cursor, Limit = 10_000, Columns = columns }, null, token ); @@ -67,22 +87,47 @@ CancellationToken token return rows; } - public async Task PushNodes(UAExtractor extractor, string database, string table, IDictionary rows, ConverterType converter, bool shouldUpdate, CancellationToken token) where T : BaseUANode + /// + /// Synchronizes all BaseUANode to CDF raw + /// + /// UAExtractor instance + /// Name of metadata database in CDF + /// Name of metadata table in CDF + /// Dictionary map of BaseUANode of their keys + /// Converter + /// Indicates if it is an update operation + /// Cancellation token + /// Operation result + public async Task PushNodes(UAExtractor extractor, string database, string table, + IDictionary rows, ConverterType converter, bool shouldUpdate, CancellationToken token) where T : BaseUANode { var result = new Result { Created = 0, Updated = 0 }; if (shouldUpdate) { - await Update(extractor, database, table, rows, converter, result, token); + await UpdateRows(extractor, database, table, rows, converter, result, token); } else { - await Create(extractor, database, table, rows, converter, result, token); + await CreateRows(extractor, database, table, rows, converter, result, token); } return result; } - private async Task Update(UAExtractor extractor, string database, string table, IDictionary dataSet, ConverterType converter, Result result, CancellationToken token) where T : BaseUANode + /// + /// Updates all BaseUANode to CDF raw + /// + /// UAExtractor instance + /// Name of metadata database in CDF + /// Name of metadata table in CDF + /// Dictionary map of BaseUANode of their keys + /// Converter + /// Operation result + /// Indicates if it is an update operation + /// Cancellation token + /// Task + private async Task UpdateRows(UAExtractor extractor, string database, string table, + IDictionary dataSet, ConverterType converter, Result result, CancellationToken token) where T : BaseUANode { await UpsertRows( database, @@ -139,7 +184,20 @@ await UpsertRows( ); } - private async Task Create(UAExtractor extractor, string database, string table, IDictionary dataMap, ConverterType converter, Result result, CancellationToken token) where T : BaseUANode + /// + /// Creates all BaseUANode to CDF raw + /// + /// UAExtractor instance + /// Name of metadata database in CDF + /// Name of metadata table in CDF + /// Dictionary map of BaseUANode of their keys + /// Converter + /// Operation result + /// Indicates if it is an update operation + /// Cancellation token + /// Task + private async Task CreateRows(UAExtractor extractor, string database, string table, + IDictionary dataMap, ConverterType converter, Result result, CancellationToken token) where T : BaseUANode { await EnsureRows( database, @@ -160,6 +218,15 @@ await EnsureRows( ); } + /// + /// Upserts all BaseUANode to CDF raw + /// + /// Name of 
metadata database in CDF + /// Name of metadata table in CDF + /// Callback to build the dto + /// Json serialization options + /// Cancellation token + /// Task private async Task UpsertRows( string dbName, string tableName, @@ -209,9 +276,20 @@ async Task CallAndCreate(IEnumerable>>? r log.LogInformation("Updated or created {Count} rows in CDF Raw", count); } - private async Task EnsureRows(string dbName, string tableName, IEnumerable keys, Func, IDictionary> dtoBuilder, JsonSerializerOptions options, CancellationToken token) + /// + /// Ensure all rows in CDF + /// + /// Name of metadata database in CDF + /// Name of metadata table in CDF + /// keys + /// Callback to build the dto + /// Json serialization options + /// Cancellation token + /// Task + private async Task EnsureRows(string dbName, string tableName, IEnumerable keys, + Func, IDictionary> dtoBuilder, JsonSerializerOptions options, CancellationToken token) { - var rows = await GetRawRows(dbName, tableName, new[] { "," }, token); + var rows = await GetRows(dbName, tableName, new[] { "," }, token); var existing = rows.Select(row => row.Key); var toCreate = keys.Except(existing); @@ -224,12 +302,7 @@ private async Task EnsureRows(string dbName, string tableName, IEnumerable PushReferences( - string database, - string table, - IEnumerable relationships, - CancellationToken token - ) + public async Task PushReferences(string database, string table, IEnumerable relationships, CancellationToken token) { var result = new Result { Created = 0, Updated = 0 }; await EnsureRows( diff --git a/Extractor/Pushers/Writers/RelationshipsWriter.cs b/Extractor/Pushers/Writers/RelationshipsWriter.cs index ee6fe953..dfd025be 100644 --- a/Extractor/Pushers/Writers/RelationshipsWriter.cs +++ b/Extractor/Pushers/Writers/RelationshipsWriter.cs @@ -1,3 +1,20 @@ +/* Cognite Extractor for OPC-UA +Copyright (C) 2021 Cognite AS + +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
*/ + using System.Collections.Generic; using System.Linq; using System.Threading; @@ -18,17 +35,19 @@ public class RelationshipsWriter : IRelationshipsWriter private readonly FullConfig config; private readonly CogniteDestination destination; - public RelationshipsWriter( - ILogger logger, - CogniteDestination destination, - FullConfig config - ) + public RelationshipsWriter( ILogger logger, CogniteDestination destination, FullConfig config) { this.log = logger; this.config = config; this.destination = destination; } + /// + /// Push all refernces to CDF relationship + /// + /// List of sanitized references + /// Cancellation token + /// A result reporting items created/updated public async Task PushReferences(IEnumerable relationships, CancellationToken token) { var result = new Result{ Created = 0, Updated = 0 }; @@ -39,6 +58,12 @@ public async Task PushReferences(IEnumerable relatio return result; } + /// + /// Push all references in chunks + /// + /// List of sanitized references + /// Cancellation token + /// Task private async Task PushReferencesChunk(IEnumerable relationships, CancellationToken token) { if (!relationships.Any()) diff --git a/Extractor/Pushers/Writers/TimeseriesWriter.cs b/Extractor/Pushers/Writers/TimeseriesWriter.cs index 2444e801..82dab601 100644 --- a/Extractor/Pushers/Writers/TimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/TimeseriesWriter.cs @@ -1,3 +1,20 @@ +/* Cognite Extractor for OPC-UA +Copyright (C) 2021 Cognite AS + +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
*/ + using System; using System.Collections.Generic; using System.Linq; @@ -30,14 +47,18 @@ public TimeseriesWriter(ILogger logger, CogniteDestination des this.destination = destination; } - public virtual async Task PushVariables( - UAExtractor extractor, - IDictionary timeseriesMap, - IDictionary nodeToAssetIds, - HashSet mismatchedTimeseries, - TypeUpdateConfig update, - CancellationToken token - ) + /// + /// Synchronizes all BaseUANode to CDF Timeseries + /// + /// UAExtractor instance + /// Dictionary of mapping of variables to keys + /// Node to assets to ids + /// Mismatched timeseries + /// Type update configuration + /// Cancellation token + /// Operation result + public virtual async Task PushVariables(UAExtractor extractor, IDictionary timeseriesMap, + IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, TypeUpdateConfig update, CancellationToken token) { var result = new Result { Created = 0, Updated = 0 }; var timeseries = await CreateTimeseries( @@ -61,15 +82,20 @@ CancellationToken token return result; } - private async Task> CreateTimeseries( - UAExtractor extractor, - IDictionary tsMap, - IDictionary nodeToAssetIds, - HashSet mismatchedTimeseries, - Result result, - bool createMinimalTimeseries, - CancellationToken token - ) + /// + /// Create BaseUANode to CDF Timeseries + /// + /// UAExtractor instance + /// Dictionary of mapping of variables to keys + /// Node to assets to ids + /// Mismatched timeseries + /// Operation result + /// Type update configuration + /// Indicate if to create minimal timeseries + /// Cancellation token + /// Operation result + private async Task> CreateTimeseries(UAExtractor extractor, IDictionary tsMap, + IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, Result result, bool createMinimalTimeseries, CancellationToken token) { var timeseries = await destination.GetOrCreateTimeSeriesAsync( tsMap.Keys, @@ -136,7 +162,18 @@ CancellationToken token return timeseries.Results; } - private async Task UpdateTimeseries(UAExtractor extractor, IDictionary tsMap, IEnumerable timeseries, IDictionary nodeToAssetIds, TypeUpdateConfig update, Result result, CancellationToken token) + /// + /// Update BaseUANode to CDF Timeseries + /// + /// UAExtractor instance + /// Dictionary of mapping of variables to keys + /// Node to assets to ids + /// Type update configuration + /// Operation result + /// Cancellation token + /// Operation result + private async Task UpdateTimeseries(UAExtractor extractor, IDictionary tsMap, + IEnumerable timeseries, IDictionary nodeToAssetIds, TypeUpdateConfig update, Result result, CancellationToken token) { var updates = new List(); var existing = timeseries.ToDictionary(asset => asset.ExternalId); From 39bb59e079bcd1469f9e4b0c5d5de0a6e9cc6526 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Sun, 9 Jul 2023 12:19:38 +0200 Subject: [PATCH 12/26] style: deprecate property --- Extractor/Config/CogniteConfig.cs | 16 +++++++++++++ Extractor/Pushers/CDFPusher.cs | 12 +++++----- ExtractorLauncher/ExtractorStarter.cs | 2 ++ Test/Integration/NodeExtractionTests.cs | 1 - Test/Unit/CDFPusherTest.cs | 30 ------------------------- 5 files changed, 23 insertions(+), 38 deletions(-) diff --git a/Extractor/Config/CogniteConfig.cs b/Extractor/Config/CogniteConfig.cs index 26ab3b81..3b030849 100644 --- a/Extractor/Config/CogniteConfig.cs +++ b/Extractor/Config/CogniteConfig.cs @@ -17,6 +17,7 @@ You should have received a copy of the GNU General Public License using Cognite.Extensions; using Cognite.Extractor.Utils; +using System; 
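The `using System;` added here brings ObsoleteAttribute into scope for the deprecations that follow. The pattern is: the old property keeps working, the compiler warns anyone who still reads it, and only the migration shim opts back in. A generic sketch of that pattern, with made-up option types rather than the actual CognitePusherConfig:

    using System;

    class PusherOptions
    {
        // Old switch: still honoured, but flagged so new configs move on.
        [Obsolete("Deprecated! Use MetadataTargets instead.")]
        public bool SkipMetadata { get; set; }

        // Replacement that the deprecation message points to.
        public object? MetadataTargets { get; set; }
    }

    static class OptionsMigration
    {
        public static bool ShouldSkip(PusherOptions options)
        {
            // Reading an [Obsolete] member raises warning CS0618; the migration
            // shim is the one place deliberately allowed to do so.
    #pragma warning disable 0618
            return options.SkipMetadata;
    #pragma warning restore 0618
        }
    }

This is the same shape used later in ExtractorStarter, where the legacy options are read once, translated into the new config, and a warning is logged.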
using System.Collections.Generic; using System.ComponentModel; using System.ComponentModel.DataAnnotations; @@ -56,6 +57,7 @@ public class CognitePusherConfig : CogniteConfig, IPusherConfig /// similarly to raw-metadata, and datapoints will be pushed. Nothing will be written to raw, and no assets will be created. /// Events will be created, but without asset context. /// + [Obsolete("Deprecated!")] public bool SkipMetadata { get; set; } /// /// Store assets and/or timeseries data in raw. Assets will not be created at all, @@ -65,6 +67,7 @@ public class CognitePusherConfig : CogniteConfig, IPusherConfig /// of the source node is added to metadata if applicable. /// Use different table names for assets and timeseries. /// + [Obsolete("Deprecated! Use MetadataTargetsConfig.RawMetadataTargetConfig instead.")] public RawMetadataConfig? RawMetadata { get; set; } /// /// Map metadata to asset/timeseries attributes. Each of "assets" and "timeseries" is a map from property DisplayName to @@ -114,8 +117,12 @@ public double? NonFiniteReplacement /// /// Configuration for writing to a custom OPC-UA flexible data model. /// + [Obsolete("Deprecated! Use MetadataTargetsConfig.FdmDestinationConfig instead.")] public FdmDestinationConfig? FlexibleDataModels { get; set; } + /// + /// This is the implementation of the metadata targets + /// public MetadataTargetsConfig? MetadataTargets { get; set; } } public class RawMetadataConfig @@ -140,8 +147,17 @@ public class RawMetadataConfig } public class MetadataTargetsConfig { + /// + /// Raw metadata targets config + /// public RawMetadataTargetConfig? RawMetadata { get; set; } + /// + /// Clean metadata targets config + /// public CleanMetadataTargetConfig? CleanMetadata { get; set; } + /// + /// FDM destination config + /// public FdmDestinationConfig? FlexibleDataModels { get; set; } } public class RawMetadataTargetConfig diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index 0a5f58cc..c8af233f 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -620,13 +620,11 @@ public async Task ExecuteDeletes(DeletedNodes deletes, CancellationToken t /// A dictionary of mapping private ConcurrentDictionary MapAssets(IEnumerable objects) { - return config.SkipMetadata ? - new ConcurrentDictionary() : - new ConcurrentDictionary( - objects - .Where(node => node.Source != NodeSource.CDF) - .ToDictionary(obj => Extractor.GetUniqueId(obj.Id)!) - ); + return new ConcurrentDictionary( + objects + .Where(node => node.Source != NodeSource.CDF) + .ToDictionary(obj => Extractor.GetUniqueId(obj.Id)!) + ); } /// diff --git a/ExtractorLauncher/ExtractorStarter.cs b/ExtractorLauncher/ExtractorStarter.cs index 8ab17d7c..13b516e6 100644 --- a/ExtractorLauncher/ExtractorStarter.cs +++ b/ExtractorLauncher/ExtractorStarter.cs @@ -125,6 +125,7 @@ public static class ExtractorStarter { return "subscriptions.keep-alive-count must be greater than 0"; } +#pragma warning disable 0618 if (config.Cognite?.RawMetadata != null) { log.LogWarning("cognite.raw-metadata is deprecated. 
Use cognite.metadata-targets instead"); @@ -164,6 +165,7 @@ public static class ExtractorStarter config.Cognite.MetadataTargets.FlexibleDataModels = config.Cognite.FlexibleDataModels; } } +#pragma warning restore 0618 return null; } diff --git a/Test/Integration/NodeExtractionTests.cs b/Test/Integration/NodeExtractionTests.cs index 448e986e..9eee0ae7 100644 --- a/Test/Integration/NodeExtractionTests.cs +++ b/Test/Integration/NodeExtractionTests.cs @@ -984,7 +984,6 @@ public async Task TestUpdateFieldsRaw(bool assets, bool timeseries) tester.Server.ResetCustomServer(); tester.Config.Extraction.Update = new UpdateConfig(); - tester.Config.Cognite.RawMetadata = null; tester.Config.Extraction.DataTypes.AllowStringVariables = false; tester.Config.Extraction.DataTypes.MaxArraySize = 0; diff --git a/Test/Unit/CDFPusherTest.cs b/Test/Unit/CDFPusherTest.cs index 018d18a6..15784531 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -276,7 +276,6 @@ public async Task TestCreateUpdateAssets() { using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); - tester.Config.Cognite.RawMetadata = null; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { CleanMetadata = new CleanMetadataTargetConfig @@ -344,11 +343,6 @@ public async Task TestCreateRawAssets() using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); - tester.Config.Cognite.RawMetadata = new RawMetadataConfig - { - AssetsTable = "assets", - Database = "metadata" - }; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { CleanMetadata = new CleanMetadataTargetConfig @@ -395,11 +389,6 @@ public async Task TestUpdateRawAssets() using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); - tester.Config.Cognite.RawMetadata = new RawMetadataConfig - { - AssetsTable = "assets", - Database = "metadata" - }; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { CleanMetadata = new CleanMetadataTargetConfig @@ -449,7 +438,6 @@ public async Task TestCreateUpdateTimeseries() { using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); - tester.Config.Cognite.RawMetadata = null; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { CleanMetadata = new CleanMetadataTargetConfig @@ -530,11 +518,6 @@ public async Task TestCreateRawTimeseries() using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); - tester.Config.Cognite.RawMetadata = new RawMetadataConfig - { - TimeseriesTable = "timeseries", - Database = "metadata" - }; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { CleanMetadata = new CleanMetadataTargetConfig @@ -587,11 +570,6 @@ public async Task TestUpdateRawTimeseries() using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); - tester.Config.Cognite.RawMetadata = new RawMetadataConfig - { - TimeseriesTable = "timeseries", - Database = "metadata" - }; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { CleanMetadata = new CleanMetadataTargetConfig @@ -659,7 +637,6 @@ public async Task TestNodeCallback() (handler, pusher) = tester.GetCDFPusher(); using var extractor = 
tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); - tester.Config.Cognite.RawMetadata = null; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { CleanMetadata = new CleanMetadataTargetConfig @@ -755,13 +732,6 @@ public async Task TestRawNodeCallback() ReportOnEmpty = true }; - tester.Config.Cognite.RawMetadata = new RawMetadataConfig - { - TimeseriesTable = "timeseries", - RelationshipsTable = "relationships", - AssetsTable = "assets", - Database = "metadata" - }; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { CleanMetadata = new CleanMetadataTargetConfig From 055eba1b5d735be921e2f21803b329ac3dffc458 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Mon, 10 Jul 2023 09:22:09 +0200 Subject: [PATCH 13/26] docs: json docs --- schema/cognite_config.schema.json | 96 +++++++++++++++++++++++++++++++ 1 file changed, 96 insertions(+) diff --git a/schema/cognite_config.schema.json b/schema/cognite_config.schema.json index 40491567..988c647f 100644 --- a/schema/cognite_config.schema.json +++ b/schema/cognite_config.schema.json @@ -31,12 +31,14 @@ }, "skip-metadata": { "type": "boolean", + "deprecated": true, "description": "Do not push any metadata at all. If this is `true`, plain timeseries without metadata will be created, similarly to `raw-metadata`, and datapoints will be pushed. Nothing will be written to raw, band no assets will be created. Events will be created, but without being contextualized to assets" }, "raw-metadata": { "type": "object", "description": "Store assets/timeseries metadata and relationships in raw. Assets will not be created at all, timeseries will be created with just `externalId`, `isStep`, and `isString`. Both timeseries and assets will be persisted in their entirity to CDF Raw. Datapoints are not affected, events will be created but without being contextualized to assets. 
The externalId of the source node is added to metadata if applicable",
             "unevaluatedProperties": false,
+            "deprecated": true,
             "required": [ "database" ],
             "properties": {
                 "database": {
@@ -57,6 +59,100 @@
                 }
             }
         },
+        "metadata-targets": {
+            "type": "object",
+            "description": "Metadata targets for objects, variables, and references in CDF.",
+            "unevaluatedProperties": false,
+            "properties": {
+                "raw-metadata": {
+                    "type": "object",
+                    "description": "Configuration for writing metadata to CDF Raw (database and tables).",
+                    "unevaluatedProperties": false,
+                    "required": ["database"],
+                    "properties": {
+                        "database": {
+                            "type": "string",
+                            "description": "Database in CDF Raw"
+                        },
+                        "assets-table": {
+                            "type": "string",
+                            "description": "Assets table in CDF Raw"
+                        },
+                        "timeseries-table": {
+                            "type": "string",
+                            "description": "Timeseries table in CDF Raw"
+                        },
+                        "relationships-table": {
+                            "type": "string",
+                            "description": "Relationships table in CDF Raw"
+                        }
+                    }
+                },
+                "clean-metadata": {
+                    "unevaluatedProperties": false,
+                    "description": "Enable or disable writing assets, timeseries, and/or relationships to CDF clean",
+                    "type": "object",
+                    "properties": {
+                        "assets": {
+                            "type": "boolean",
+                            "description": "Set to `true` to write assets to CDF clean"
+                        },
+                        "timeseries": {
+                            "type": "boolean",
+                            "description": "Set to `true` to write timeseries metadata to CDF clean"
+                        },
+                        "relationships": {
+                            "type": "boolean",
+                            "description": "Set to `true` to write relationships to CDF clean"
+                        }
+                    }
+                },
+                "flexible-data-models": {
+                    "unevaluatedProperties": false,
+                    "description": "Configuration for writing to flexible data models in CDF",
+                    "type": "object",
+                    "required": ["enabled", "exclude-no-referenced", "types-to-map", "skip-simple-types", "skip-types-on-equal-count", "ignore-mandatory"],
+                    "properties": {
+                        "space": {
+                            "type": "string",
+                            "description": "Space in CDF to write to"
+                        },
+                        "enabled": {
+                            "type": "boolean",
+                            "description": "Set to `true` to enable writing to flexible data models"
+                        },
+                        "exclude-no-referenced": {
+                            "type": "boolean",
+                            "description": "Exclude any node not referenced by custom nodes"
+                        },
+                        "types-to-map": {
+                            "type": "string",
+                            "description": "Which types to map to FDM",
+                            "enum": ["referenced", "custom", "all"]
+                        },
+                        "skip-simple-types": {
+                            "type": "boolean",
+                            "description": "Do not create views without an associated container"
+                        },
+                        "skip-types-on-equal-count": {
+                            "type": "boolean",
+                            "description": "Skip pushing views if the number of views in CDF is equal to the number of views on the server"
+                        },
+                        "ignore-mandatory": {
+                            "type": "boolean",
+                            "description": "Allow mandatory properties to be treated as optional"
+                        },
+                        "connection-target-map": {
+                            "type": "object",
+                            "description": "Target connections",
+                            "patternProperties": {
+                                "[A-z0-9-_.]+": {
+                                    "type": "string"
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        },
         "metadata-mapping": {
             "type": "object",
             "description": "Map metadata to asset/timeseries attributes.
This lets you assign properties in OPC-UA directly to fields like `description` and `unit` in CDF", From 672e7a8273f4628eb110eb1e2b89ff566c9cd092 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Mon, 24 Jul 2023 12:24:28 +0200 Subject: [PATCH 14/26] style: rename config --- Extractor/Config/CogniteConfig.cs | 6 +-- Extractor/NodeSources/NodeSetSource.cs | 2 +- Extractor/NodeSources/UANodeSource.cs | 2 +- Extractor/Nodes/UADataType.cs | 2 +- Extractor/Pushers/CDFPusher.cs | 6 +-- Extractor/Pushers/FDM/FDMWriter.cs | 8 ++-- Extractor/Pushers/FDM/TypeHierarchyBuilder.cs | 4 +- Extractor/Pushers/Writers/TimeseriesWriter.cs | 2 +- ExtractorLauncher/ExtractorStarter.cs | 8 ++-- Test/Integration/NodeExtractionTests.cs | 8 ++-- Test/Unit/CDFPusherTest.cs | 38 +++++++++---------- Test/Unit/DeleteTest.cs | 6 +-- Test/Unit/FDMTests.cs | 18 ++++----- 13 files changed, 55 insertions(+), 55 deletions(-) diff --git a/Extractor/Config/CogniteConfig.cs b/Extractor/Config/CogniteConfig.cs index 3b030849..0551cadb 100644 --- a/Extractor/Config/CogniteConfig.cs +++ b/Extractor/Config/CogniteConfig.cs @@ -150,15 +150,15 @@ public class MetadataTargetsConfig /// /// Raw metadata targets config /// - public RawMetadataTargetConfig? RawMetadata { get; set; } + public RawMetadataTargetConfig? Raw { get; set; } /// /// Clean metadata targets config /// - public CleanMetadataTargetConfig? CleanMetadata { get; set; } + public CleanMetadataTargetConfig? Clean { get; set; } /// /// FDM destination config /// - public FdmDestinationConfig? FlexibleDataModels { get; set; } + public FdmDestinationConfig? DataModels { get; set; } } public class RawMetadataTargetConfig { diff --git a/Extractor/NodeSources/NodeSetSource.cs b/Extractor/NodeSources/NodeSetSource.cs index 4922c320..6224c6dc 100644 --- a/Extractor/NodeSources/NodeSetSource.cs +++ b/Extractor/NodeSources/NodeSetSource.cs @@ -349,7 +349,7 @@ private async Task InitNodes(IEnumerable nodes, CancellationToken to await InitNodes(NodeList, token); - var usesFdm = Config.Cognite?.MetadataTargets?.FlexibleDataModels?.Enabled ?? false; + var usesFdm = Config.Cognite?.MetadataTargets?.DataModels?.Enabled ?? false; if (Config.Extraction.Relationships.Enabled) { diff --git a/Extractor/NodeSources/UANodeSource.cs b/Extractor/NodeSources/UANodeSource.cs index 3c12a746..775c2c6e 100644 --- a/Extractor/NodeSources/UANodeSource.cs +++ b/Extractor/NodeSources/UANodeSource.cs @@ -110,7 +110,7 @@ private async Task InitNodes(IEnumerable nodes, CancellationToken to await InitNodes(NodeList, token); - var usesFdm = Config.Cognite?.MetadataTargets?.FlexibleDataModels?.Enabled ?? false; + var usesFdm = Config.Cognite?.MetadataTargets?.DataModels?.Enabled ?? false; if (Config.Extraction.Relationships.Enabled) { diff --git a/Extractor/Nodes/UADataType.cs b/Extractor/Nodes/UADataType.cs index 97cb27d5..121124f9 100644 --- a/Extractor/Nodes/UADataType.cs +++ b/Extractor/Nodes/UADataType.cs @@ -38,7 +38,7 @@ public DataTypeAttributes() : base(NodeClass.DataType) public override IEnumerable GetAttributeSet(FullConfig config) { - if (config.Cognite?.MetadataTargets?.FlexibleDataModels?.Enabled ?? false) + if (config.Cognite?.MetadataTargets?.DataModels?.Enabled ?? false) { yield return Attributes.DataTypeDefinition; } diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index c8af233f..ebbea691 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -74,8 +74,8 @@ public sealed class CDFPusher : IPusher private readonly BrowseCallback? 
callback; private readonly FDMWriter? fdmDestination; - private RawMetadataTargetConfig? RawMetadataTargetConfig => fullConfig.Cognite?.MetadataTargets?.RawMetadata; - private CleanMetadataTargetConfig? CleanMetadataTargetConfig => fullConfig.Cognite?.MetadataTargets?.CleanMetadata; + private RawMetadataTargetConfig? RawMetadataTargetConfig => fullConfig.Cognite?.MetadataTargets?.Raw; + private CleanMetadataTargetConfig? CleanMetadataTargetConfig => fullConfig.Cognite?.MetadataTargets?.Clean; public CDFPusher( @@ -96,7 +96,7 @@ public CDFPusher( { callback = new BrowseCallback(destination, config.BrowseCallback, log); } - if (config.MetadataTargets?.FlexibleDataModels != null && (config.MetadataTargets?.FlexibleDataModels.Enabled ?? false)) + if (config.MetadataTargets?.DataModels != null && (config.MetadataTargets?.DataModels.Enabled ?? false)) { fdmDestination = new FDMWriter(provider.GetRequiredService(), destination, provider.GetRequiredService>()); diff --git a/Extractor/Pushers/FDM/FDMWriter.cs b/Extractor/Pushers/FDM/FDMWriter.cs index 06dedf87..68a82fcc 100644 --- a/Extractor/Pushers/FDM/FDMWriter.cs +++ b/Extractor/Pushers/FDM/FDMWriter.cs @@ -46,7 +46,7 @@ public FDMWriter(FullConfig config, CogniteDestination destination, ILogger instances, int chunkSize, CancellationToken token) @@ -91,7 +91,7 @@ private async Task Initialize(FDMTypeBatch types, CancellationToken token) var options = new JsonSerializerOptions(Oryx.Cognite.Common.jsonOptions) { WriteIndented = true }; var viewsToInsert = types.Views.Values.ToList(); - if (config.Cognite!.MetadataTargets!.FlexibleDataModels!.SkipSimpleTypes) + if (config.Cognite!.MetadataTargets!.DataModels!.SkipSimpleTypes) { viewsToInsert = viewsToInsert.Where(v => v.Properties.Any() || types.ViewIsReferenced.GetValueOrDefault(v.ExternalId)).ToList(); } @@ -108,7 +108,7 @@ private async Task Initialize(FDMTypeBatch types, CancellationToken token) if (config.DryRun) return; // Check if the data model exists - if (config.Cognite!.MetadataTargets!.FlexibleDataModels!.SkipTypesOnEqualCount) + if (config.Cognite!.MetadataTargets!.DataModels!.SkipTypesOnEqualCount) { try { @@ -237,7 +237,7 @@ public async Task PushNodes( log.LogInformation("Mapped out {Nodes} nodes and {Edges} edges to write to PG3", nodes.Count, finalReferences.Count); // Run the node filter unless we are writing everything. 
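The renamed accessors in this region rely on the null-forgiving operator (`config.Cognite!.MetadataTargets!.DataModels!`), which is safe as long as the FDM writer is only constructed when data models are enabled. A defensive variant of the same check is sketched below, using stand-in record types that mirror the nesting used in this changeset rather than the real config classes:

    // Stand-in types; the config path Cognite -> MetadataTargets -> DataModels has this shape.
    record DataModelsCfg(bool Enabled, bool ExcludeNonReferenced);
    record MetadataTargetsCfg(DataModelsCfg? DataModels);
    record CogniteCfg(MetadataTargetsCfg? MetadataTargets);

    static class FdmConfigGuards
    {
        // Trim the node hierarchy only when data models are enabled and trimming is requested.
        public static bool ShouldTrim(CogniteCfg? cognite)
        {
            var dm = cognite?.MetadataTargets?.DataModels;
            return dm is { Enabled: true, ExcludeNonReferenced: true };
        }
    }

Whether to prefer `!` or a guard like this is a style call; the guard degrades to "do nothing" instead of throwing if the construction guarantee ever changes.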
- if (config.Cognite!.MetadataTargets!.FlexibleDataModels!.ExcludeNonReferenced) + if (config.Cognite!.MetadataTargets!.DataModels!.ExcludeNonReferenced) { var trimmer = new NodeTrimmer(nodeHierarchy, config, log); nodeHierarchy = trimmer.Filter(); diff --git a/Extractor/Pushers/FDM/TypeHierarchyBuilder.cs b/Extractor/Pushers/FDM/TypeHierarchyBuilder.cs index e27ff39e..17399d24 100644 --- a/Extractor/Pushers/FDM/TypeHierarchyBuilder.cs +++ b/Extractor/Pushers/FDM/TypeHierarchyBuilder.cs @@ -224,8 +224,8 @@ public TypeHierarchyBuilder(ILogger log, DMSValueConverter converter, FullConfig this.log = log; this.config = config; nodeTypes = new NodeTypeCollector(log); - space = config.Cognite!.MetadataTargets!.FlexibleDataModels!.Space!; - fdmConfig = config.Cognite!.MetadataTargets!.FlexibleDataModels!; + space = config.Cognite!.MetadataTargets!.DataModels!.Space!; + fdmConfig = config.Cognite!.MetadataTargets!.DataModels!; this.converter = converter; } diff --git a/Extractor/Pushers/Writers/TimeseriesWriter.cs b/Extractor/Pushers/Writers/TimeseriesWriter.cs index 82dab601..257dd950 100644 --- a/Extractor/Pushers/Writers/TimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/TimeseriesWriter.cs @@ -38,7 +38,7 @@ public class TimeseriesWriter : ITimeseriesWriter private ILogger log; private readonly FullConfig config; private readonly CogniteDestination destination; - protected virtual bool createMinimalTimeseries => !(config.Cognite?.MetadataTargets?.CleanMetadata?.Timeseries ?? false); + protected virtual bool createMinimalTimeseries => !(config.Cognite?.MetadataTargets?.Clean?.Timeseries ?? false); public TimeseriesWriter(ILogger logger, CogniteDestination destination, FullConfig config) { diff --git a/ExtractorLauncher/ExtractorStarter.cs b/ExtractorLauncher/ExtractorStarter.cs index 13b516e6..2e2cc1fe 100644 --- a/ExtractorLauncher/ExtractorStarter.cs +++ b/ExtractorLauncher/ExtractorStarter.cs @@ -140,13 +140,13 @@ public static class ExtractorStarter var useCleanRelationships = rawMetadata?.Database == null || rawMetadata?.RelationshipsTable == null; config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Assets = useCleanAssets, Timeseries = useCleanTimeseries, Relationships = useCleanRelationships }, - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { Database = rawMetadata?.Database, AssetsTable = rawMetadata?.AssetsTable, @@ -161,8 +161,8 @@ public static class ExtractorStarter if (config.Cognite == null) config.Cognite = new CognitePusherConfig(); if (config.Cognite.MetadataTargets == null) config.Cognite.MetadataTargets = new MetadataTargetsConfig(); - if (config.Cognite.MetadataTargets.FlexibleDataModels == null) { - config.Cognite.MetadataTargets.FlexibleDataModels = config.Cognite.FlexibleDataModels; + if (config.Cognite.MetadataTargets.DataModels == null) { + config.Cognite.MetadataTargets.DataModels = config.Cognite.FlexibleDataModels; } } #pragma warning restore 0618 diff --git a/Test/Integration/NodeExtractionTests.cs b/Test/Integration/NodeExtractionTests.cs index 9eee0ae7..2cadd92d 100644 --- a/Test/Integration/NodeExtractionTests.cs +++ b/Test/Integration/NodeExtractionTests.cs @@ -869,7 +869,7 @@ public async Task TestUpdateFields( tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Assets = true, Timeseries = true @@ -936,13 +936,13 @@ public 
async Task TestUpdateFieldsRaw(bool assets, bool timeseries) tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Relationships = true, Assets = false, Timeseries = false }, - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { Database = "metadata", AssetsTable = "assets", @@ -1012,7 +1012,7 @@ public async Task TestUpdateNullPropertyValue() }; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Assets = true, Timeseries = true diff --git a/Test/Unit/CDFPusherTest.cs b/Test/Unit/CDFPusherTest.cs index 15784531..2a44ffd5 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -278,7 +278,7 @@ public async Task TestCreateUpdateAssets() CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Assets = true } @@ -345,13 +345,13 @@ public async Task TestCreateRawAssets() tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Relationships = true, Assets = false, Timeseries = true }, - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { Database = "metadata", AssetsTable = "assets" @@ -391,13 +391,13 @@ public async Task TestUpdateRawAssets() tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Relationships = true, Assets = false, Timeseries = true }, - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { Database = "metadata", AssetsTable = "assets", @@ -440,7 +440,7 @@ public async Task TestCreateUpdateTimeseries() CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Relationships = true, Assets = false, @@ -520,13 +520,13 @@ public async Task TestCreateRawTimeseries() tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Relationships = true, Assets = false, Timeseries = false }, - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { Database = "metadata", AssetsTable = "assets", @@ -572,13 +572,13 @@ public async Task TestUpdateRawTimeseries() tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Relationships = true, Assets = false, Timeseries = false }, - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { Database = "metadata", AssetsTable = "assets", @@ -639,7 +639,7 @@ public async Task TestNodeCallback() CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Relationships = true, Assets = true, @@ -734,11 +734,11 @@ public async Task TestRawNodeCallback() tester.Config.Cognite.MetadataTargets = new 
MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Timeseries = false, }, - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { Database = "metadata", AssetsTable = "assets", @@ -935,7 +935,7 @@ public async Task TestCreateRelationships() tester.Config.Extraction.Relationships.Enabled = true; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Assets = false, Timeseries = false @@ -1001,7 +1001,7 @@ public async Task TestCreateRawRelationships() tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { RelationshipsTable = "relationships", Database = "metadata" @@ -1200,7 +1200,7 @@ public async Task TestCDFAsSourceData() }; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { AssetsTable = "assets", TimeseriesTable = "timeseries", @@ -1278,7 +1278,7 @@ public async Task TestCDFAsSourceEvents() }; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { AssetsTable = "assets", TimeseriesTable = "timeseries", @@ -1353,12 +1353,12 @@ public async Task TestCDFNodeSetBackground() }; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Relationships = true, Assets = false, }, - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { Database = "metadata", TimeseriesTable = "timeseries", diff --git a/Test/Unit/DeleteTest.cs b/Test/Unit/DeleteTest.cs index 392d58d3..22ad7a1f 100644 --- a/Test/Unit/DeleteTest.cs +++ b/Test/Unit/DeleteTest.cs @@ -393,7 +393,7 @@ public async Task TestCDFDelete() tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Assets = true, // Timeseries = true @@ -437,11 +437,11 @@ public async Task TestCDFDeleteRaw() tester.Config.Cognite.DeleteRelationships = true; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - CleanMetadata = new CleanMetadataTargetConfig + Clean = new CleanMetadataTargetConfig { Timeseries = true, }, - RawMetadata = new RawMetadataTargetConfig + Raw = new RawMetadataTargetConfig { Database = "metadata", AssetsTable = "assets", diff --git a/Test/Unit/FDMTests.cs b/Test/Unit/FDMTests.cs index 5b86c819..ece68d9a 100644 --- a/Test/Unit/FDMTests.cs +++ b/Test/Unit/FDMTests.cs @@ -25,7 +25,7 @@ public FDMTests(ITestOutputHelper output, FDMTestFixture tester) tester.ResetConfig(); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - FlexibleDataModels = new FdmDestinationConfig + DataModels = new FdmDestinationConfig { Enabled = true, Space = "test", @@ -46,8 +46,8 @@ public FDMTests(ITestOutputHelper output, FDMTestFixture tester) [Fact] public async Task TestMapCustomTypes() { - tester.Config.Cognite.MetadataTargets.FlexibleDataModels.ExcludeNonReferenced = true; - tester.Config.Cognite.MetadataTargets.FlexibleDataModels.TypesToMap = TypesToMap.Custom; + tester.Config.Cognite.MetadataTargets.DataModels.ExcludeNonReferenced = true; + tester.Config.Cognite.MetadataTargets.DataModels.TypesToMap = TypesToMap.Custom; var 
(handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); @@ -72,8 +72,8 @@ public async Task TestMapCustomTypes() [Fact] public async Task TestMapReferencedTypes() { - tester.Config.Cognite.MetadataTargets.FlexibleDataModels.ExcludeNonReferenced = true; - tester.Config.Cognite.MetadataTargets.FlexibleDataModels.TypesToMap = TypesToMap.Referenced; + tester.Config.Cognite.MetadataTargets.DataModels.ExcludeNonReferenced = true; + tester.Config.Cognite.MetadataTargets.DataModels.TypesToMap = TypesToMap.Referenced; var (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); @@ -94,8 +94,8 @@ public async Task TestMapReferencedTypes() [Fact] public async Task TestMapReferencedTypesNoTrim() { - tester.Config.Cognite.MetadataTargets.FlexibleDataModels.ExcludeNonReferenced = false; - tester.Config.Cognite.MetadataTargets.FlexibleDataModels.TypesToMap = TypesToMap.Referenced; + tester.Config.Cognite.MetadataTargets.DataModels.ExcludeNonReferenced = false; + tester.Config.Cognite.MetadataTargets.DataModels.TypesToMap = TypesToMap.Referenced; var (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); @@ -116,8 +116,8 @@ public async Task TestMapReferencedTypesNoTrim() [Fact] public async Task TestMapEverything() { - tester.Config.Cognite.MetadataTargets.FlexibleDataModels.ExcludeNonReferenced = false; - tester.Config.Cognite.MetadataTargets.FlexibleDataModels.TypesToMap = TypesToMap.All; + tester.Config.Cognite.MetadataTargets.DataModels.ExcludeNonReferenced = false; + tester.Config.Cognite.MetadataTargets.DataModels.TypesToMap = TypesToMap.All; var (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); From 563873841e19052ef3faca466ecc30cf99a3e040 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Tue, 25 Jul 2023 03:51:40 +0200 Subject: [PATCH 15/26] refactor: abstract away --- Extractor/Nodes/UAVariable.cs | 28 ++-- Extractor/Pushers/MqttPusher.cs | 2 +- .../Pushers/Writers/BaseTimeseriesWriter.cs | 128 +++++++++++++++ .../Writers/MinimalTimeseriesWriter.cs | 30 +++- Extractor/Pushers/Writers/TimeseriesWriter.cs | 152 +++--------------- Test/Unit/TypesTest.cs | 2 +- 6 files changed, 193 insertions(+), 149 deletions(-) create mode 100644 Extractor/Pushers/Writers/BaseTimeseriesWriter.cs diff --git a/Extractor/Nodes/UAVariable.cs b/Extractor/Nodes/UAVariable.cs index 2ec6d3eb..d748ad5b 100644 --- a/Extractor/Nodes/UAVariable.cs +++ b/Extractor/Nodes/UAVariable.cs @@ -457,22 +457,9 @@ public TimeSeriesCreate ToTimeseries( UAExtractor extractor, long? dataSetId, IDictionary? nodeToAssetIds, - Dictionary? metaMap, - bool minimal = false) + Dictionary? metaMap) { string? externalId = GetUniqueId(client); - - if (minimal) - { - return new TimeSeriesCreate - { - ExternalId = externalId, - IsString = FullAttributes.DataType.IsString, - IsStep = FullAttributes.DataType.IsStep, - DataSetId = dataSetId - }; - } - var writePoco = new TimeSeriesCreate { Description = FullAttributes.Description, @@ -502,6 +489,19 @@ public TimeSeriesCreate ToTimeseries( return writePoco; } + + public TimeSeriesCreate ToMinimalTimeseries(IUAClientAccess client, long? dataSetId) + { + string? 
externalId = GetUniqueId(client); + + return new TimeSeriesCreate + { + ExternalId = externalId, + IsString = FullAttributes.DataType.IsString, + IsStep = FullAttributes.DataType.IsStep, + DataSetId = dataSetId + }; + } #endregion } diff --git a/Extractor/Pushers/MqttPusher.cs b/Extractor/Pushers/MqttPusher.cs index fdc2cd5c..eeace687 100644 --- a/Extractor/Pushers/MqttPusher.cs +++ b/Extractor/Pushers/MqttPusher.cs @@ -697,7 +697,7 @@ private async Task PushTimeseries(IEnumerable variables, TypeU { var minimalTimeseries = variables .Where(variable => !update.AnyUpdate || !variable.Changed) - .Select(variable => variable.ToTimeseries(fullConfig, Extractor, Extractor, config.DataSetId, null, null, true)) + .Select(variable => variable.ToMinimalTimeseries(Extractor, config.DataSetId)) .Where(variable => variable != null) .ToList(); diff --git a/Extractor/Pushers/Writers/BaseTimeseriesWriter.cs b/Extractor/Pushers/Writers/BaseTimeseriesWriter.cs new file mode 100644 index 00000000..b599cfbd --- /dev/null +++ b/Extractor/Pushers/Writers/BaseTimeseriesWriter.cs @@ -0,0 +1,128 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Cognite.Extensions; +using Cognite.Extractor.Utils; +using Cognite.OpcUa.Config; +using Cognite.OpcUa.Nodes; +using Cognite.OpcUa.NodeSources; +using Cognite.OpcUa.Pushers.Writers.Dtos; +using Cognite.OpcUa.Pushers.Writers.Interfaces; +using CogniteSdk; +using Microsoft.Extensions.Logging; +using Opc.Ua; + +namespace Cognite.OpcUa.Pushers.Writers +{ + public abstract class BaseTimeseriesWriter : ITimeseriesWriter + { + protected readonly ILogger logger; + protected readonly FullConfig config; + protected readonly CogniteDestination destination; + + public BaseTimeseriesWriter(ILogger logger, CogniteDestination destination, FullConfig config) + { + this.logger = logger; + this.config = config; + this.destination = destination; + } + + + /// + /// Synchronizes all BaseUANode to CDF Timeseries + /// + /// UAExtractor instance + /// Dictionary of mapping of variables to keys + /// Node to assets to ids + /// Mismatched timeseries + /// Type update configuration + /// Cancellation token + /// Operation result + public virtual async Task PushVariables(UAExtractor extractor, IDictionary timeseriesMap, + IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, TypeUpdateConfig update, CancellationToken token) + { + var result = new Result { Created = 0, Updated = 0 }; + var timeseries = await CreateTimeseries( + extractor, + timeseriesMap, + nodeToAssetIds, + mismatchedTimeseries, + result, + token + ); + + var toPushMeta = timeseriesMap + .Where(kvp => kvp.Value.Source != NodeSource.CDF) + .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); + + if (update.AnyUpdate && toPushMeta.Any()) + { + await UpdateTimeseries(extractor, toPushMeta, timeseries, nodeToAssetIds, update, result, token); + } + return result; + } + + /// + /// Create BaseUANode to CDF Timeseries + /// + /// UAExtractor instance + /// Dictionary of mapping of variables to keys + /// Node to assets to ids + /// Mismatched timeseries + /// Operation result + /// Type update configuration + /// Indicate if to create minimal timeseries + /// Cancellation token + /// Operation result + private async Task> CreateTimeseries(UAExtractor extractor, IDictionary tsMap, + IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, Result result, CancellationToken token) + { + var timeseries = await destination.GetOrCreateTimeSeriesAsync( + 
tsMap.Keys, + ids => BuildTimeseries(tsMap, ids, extractor, nodeToAssetIds, result), + RetryMode.None, + SanitationMode.Clean, + token + ); + + logger.LogResult(timeseries, RequestType.CreateTimeSeries, true); + + timeseries.ThrowOnFatal(); + + if (timeseries.Results == null) + return Array.Empty(); + + var foundBadTimeseries = new List(); + foreach (var ts in timeseries.Results) + { + var loc = tsMap[ts.ExternalId]; + if (nodeToAssetIds.TryGetValue(loc.ParentId, out var parentId)) + { + nodeToAssetIds[loc.Id] = parentId; + } + if (ts.IsString != loc.FullAttributes.DataType.IsString) + { + mismatchedTimeseries.Add(ts.ExternalId); + foundBadTimeseries.Add(ts.ExternalId); + } + } + if (foundBadTimeseries.Any()) + { + logger.LogDebug( + "Found mismatched timeseries when ensuring: {TimeSeries}", + string.Join(", ", foundBadTimeseries) + ); + } + + return timeseries.Results; + } + + protected abstract IEnumerable BuildTimeseries(IDictionary tsMap, + IEnumerable ids, UAExtractor extractor, IDictionary nodeToAssetIds, Result result); + + protected abstract Task UpdateTimeseries(UAExtractor extractor, IDictionary tsMap, + IEnumerable timeseries, IDictionary nodeToAssetIds, TypeUpdateConfig update, Result result, CancellationToken token); + } +} diff --git a/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs b/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs index 121b0dba..189c53c2 100644 --- a/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs @@ -15,21 +15,43 @@ You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; using Cognite.Extractor.Utils; using Cognite.OpcUa.Config; -using Cognite.OpcUa.Pushers.Writers.Interfaces; +using Cognite.OpcUa.Nodes; +using Cognite.OpcUa.Pushers.Writers.Dtos; +using CogniteSdk; using Microsoft.Extensions.Logging; +using Opc.Ua; namespace Cognite.OpcUa.Pushers.Writers { - public class MinimalTimeseriesWriter : TimeseriesWriter, ITimeseriesWriter + public class MinimalTimeseriesWriter : BaseTimeseriesWriter { public MinimalTimeseriesWriter( - ILogger logger, + ILogger logger, CogniteDestination destination, FullConfig config ) : base(logger, destination, config) { } - protected override bool createMinimalTimeseries => true; + + protected override IEnumerable BuildTimeseries(IDictionary tsMap, + IEnumerable ids, UAExtractor extractor, IDictionary nodeToAssetIds, Result result) + { + var tss = ids.Select(id => tsMap[id]); + var creates = tss.Select(ts => ts.ToMinimalTimeseries(extractor, config.Cognite?.DataSet?.Id)) + .Where(ts => ts != null); + result.Created += creates.Count(); + return creates; + } + + protected override Task UpdateTimeseries(UAExtractor extractor, IDictionary tsMap, + IEnumerable timeseries, IDictionary nodeToAssetIds, TypeUpdateConfig update, Result result, CancellationToken token) + { + return Task.CompletedTask; + } } } diff --git a/Extractor/Pushers/Writers/TimeseriesWriter.cs b/Extractor/Pushers/Writers/TimeseriesWriter.cs index 257dd950..3ca94ab2 100644 --- a/Extractor/Pushers/Writers/TimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/TimeseriesWriter.cs @@ -15,7 +15,6 @@ You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ -using System; using System.Collections.Generic; using System.Linq; using System.Threading; @@ -24,142 +23,37 @@ You should have received a copy of the GNU General Public License using Cognite.Extractor.Utils; using Cognite.OpcUa.Config; using Cognite.OpcUa.Nodes; -using Cognite.OpcUa.NodeSources; using Cognite.OpcUa.Pushers.Writers.Dtos; -using Cognite.OpcUa.Pushers.Writers.Interfaces; using CogniteSdk; using Microsoft.Extensions.Logging; using Opc.Ua; namespace Cognite.OpcUa.Pushers.Writers { - public class TimeseriesWriter : ITimeseriesWriter + public class TimeseriesWriter : BaseTimeseriesWriter { - private ILogger log; - private readonly FullConfig config; - private readonly CogniteDestination destination; - protected virtual bool createMinimalTimeseries => !(config.Cognite?.MetadataTargets?.Clean?.Timeseries ?? false); + public TimeseriesWriter(ILogger logger, CogniteDestination destination, FullConfig config) + : base(logger, destination, config) + { } - public TimeseriesWriter(ILogger logger, CogniteDestination destination, FullConfig config) + protected override IEnumerable BuildTimeseries(IDictionary tsMap, + IEnumerable ids, UAExtractor extractor, IDictionary nodeToAssetIds, Result result) { - this.log = logger; - this.config = config; - this.destination = destination; - } - - /// - /// Synchronizes all BaseUANode to CDF Timeseries - /// - /// UAExtractor instance - /// Dictionary of mapping of variables to keys - /// Node to assets to ids - /// Mismatched timeseries - /// Type update configuration - /// Cancellation token - /// Operation result - public virtual async Task PushVariables(UAExtractor extractor, IDictionary timeseriesMap, - IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, TypeUpdateConfig update, CancellationToken token) - { - var result = new Result { Created = 0, Updated = 0 }; - var timeseries = await CreateTimeseries( - extractor, - timeseriesMap, - nodeToAssetIds, - mismatchedTimeseries, - result, - createMinimalTimeseries, - token - ); - - var toPushMeta = timeseriesMap - .Where(kvp => kvp.Value.Source != NodeSource.CDF) - .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); - - if (update.AnyUpdate && toPushMeta.Any()) - { - await UpdateTimeseries(extractor, toPushMeta, timeseries, nodeToAssetIds, update, result, token); - } - return result; - } - - /// - /// Create BaseUANode to CDF Timeseries - /// - /// UAExtractor instance - /// Dictionary of mapping of variables to keys - /// Node to assets to ids - /// Mismatched timeseries - /// Operation result - /// Type update configuration - /// Indicate if to create minimal timeseries - /// Cancellation token - /// Operation result - private async Task> CreateTimeseries(UAExtractor extractor, IDictionary tsMap, - IDictionary nodeToAssetIds, HashSet mismatchedTimeseries, Result result, bool createMinimalTimeseries, CancellationToken token) - { - var timeseries = await destination.GetOrCreateTimeSeriesAsync( - tsMap.Keys, - ids => - { - var tss = ids.Select(id => tsMap[id]); - var creates = tss.Select( - ts => - ts.ToTimeseries( - config, - extractor, - extractor, - config.Cognite?.DataSet?.Id, - nodeToAssetIds, - config.Cognite?.MetadataMapping?.Timeseries, - createMinimalTimeseries - ) - ) - .Where(ts => ts != null); - if (createMinimalTimeseries) - { - result.Created += creates.Count(); - } - else - { - result.Created += creates.Count(); - } - return creates; - }, - RetryMode.None, - SanitationMode.Clean, - token - ); - - 
log.LogResult(timeseries, RequestType.CreateTimeSeries, true); - - timeseries.ThrowOnFatal(); - - if (timeseries.Results == null) - return Array.Empty(); - - var foundBadTimeseries = new List(); - foreach (var ts in timeseries.Results) - { - var loc = tsMap[ts.ExternalId]; - if (nodeToAssetIds.TryGetValue(loc.ParentId, out var parentId)) - { - nodeToAssetIds[loc.Id] = parentId; - } - if (ts.IsString != loc.FullAttributes.DataType.IsString) - { - mismatchedTimeseries.Add(ts.ExternalId); - foundBadTimeseries.Add(ts.ExternalId); - } - } - if (foundBadTimeseries.Any()) - { - log.LogDebug( - "Found mismatched timeseries when ensuring: {TimeSeries}", - string.Join(", ", foundBadTimeseries) - ); - } - - return timeseries.Results; + var tss = ids.Select(id => tsMap[id]); + var creates = tss.Select( + ts => + ts.ToTimeseries( + config, + extractor, + extractor, + config.Cognite?.DataSet?.Id, + nodeToAssetIds, + config.Cognite?.MetadataMapping?.Timeseries + ) + ) + .Where(ts => ts != null); + result.Created += creates.Count(); + return creates; } /// @@ -172,7 +66,7 @@ private async Task> CreateTimeseries(UAExtractor extract /// Operation result /// Cancellation token /// Operation result - private async Task UpdateTimeseries(UAExtractor extractor, IDictionary tsMap, + protected override async Task UpdateTimeseries(UAExtractor extractor, IDictionary tsMap, IEnumerable timeseries, IDictionary nodeToAssetIds, TypeUpdateConfig update, Result result, CancellationToken token) { var updates = new List(); @@ -195,7 +89,7 @@ private async Task UpdateTimeseries(UAExtractor extractor, IDictionary Date: Tue, 25 Jul 2023 13:52:07 +0200 Subject: [PATCH 16/26] feat: abstract timeseries --- Extractor/Pushers/CDFPusher.cs | 45 ++++++------ Extractor/Pushers/FDM/FDMWriter.cs | 2 +- .../Pushers/Writers/BaseTimeseriesWriter.cs | 6 +- Extractor/Pushers/Writers/CDFWriter.cs | 27 +++---- .../Pushers/Writers/Interfaces/ICDFWriter.cs | 10 ++- .../Writers/MinimalTimeseriesWriter.cs | 6 +- Extractor/Pushers/Writers/TimeseriesWriter.cs | 2 +- Extractor/Pushers/Writers/WriterUtils.cs | 73 +++++++++++++------ ExtractorLauncher/ExtractorStarter.cs | 2 +- Test/Unit/CDFPusherTest.cs | 2 +- Test/Unit/TypesTest.cs | 2 +- Test/Utils/BaseExtractorTestFixture.cs | 2 +- 12 files changed, 107 insertions(+), 72 deletions(-) diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index ebbea691..4bebf68b 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -22,7 +22,6 @@ You should have received a copy of the GNU General Public License using Cognite.OpcUa.History; using Cognite.OpcUa.Nodes; using Cognite.OpcUa.NodeSources; -using Cognite.OpcUa.Pushers.FDM; using Cognite.OpcUa.Pushers.Writers.Interfaces; using Cognite.OpcUa.Types; using CogniteSdk; @@ -61,9 +60,9 @@ public sealed class CDFPusher : IPusher private UAExtractor extractor; public UAExtractor Extractor { get => extractor; set { extractor = value; - if (fdmDestination != null) + if (cdfWriter.FDM != null) { - fdmDestination.Extractor = value; + cdfWriter.FDM.Extractor = value; } } } public IPusherConfig BaseConfig { get; } @@ -73,7 +72,6 @@ public sealed class CDFPusher : IPusher private readonly CogniteDestination destination; private readonly BrowseCallback? callback; - private readonly FDMWriter? fdmDestination; private RawMetadataTargetConfig? RawMetadataTargetConfig => fullConfig.Cognite?.MetadataTargets?.Raw; private CleanMetadataTargetConfig? 
CleanMetadataTargetConfig => fullConfig.Cognite?.MetadataTargets?.Clean; @@ -96,11 +94,6 @@ public CDFPusher( { callback = new BrowseCallback(destination, config.BrowseCallback, log); } - if (config.MetadataTargets?.DataModels != null && (config.MetadataTargets?.DataModels.Enabled ?? false)) - { - fdmDestination = new FDMWriter(provider.GetRequiredService(), destination, - provider.GetRequiredService>()); - } } private static readonly Counter dataPointsCounter = Metrics @@ -325,9 +318,9 @@ public async Task PushNodes(IEnumerable objects, if (fullConfig.DryRun) { - if (fdmDestination != null) + if (cdfWriter.FDM != null) { - await fdmDestination.PushNodes(objects, variables, references, Extractor, token); + await cdfWriter.FDM.PushNodes(objects, variables, references, Extractor, token); } return result; } @@ -348,10 +341,19 @@ public async Task PushNodes(IEnumerable objects, var tasks = new List(); - tasks.Add(PushAssets(objects, update.Objects, report, result, token)); + if (cdfWriter.Assets != null) + { + tasks.Add(PushAssets(objects, update.Objects, report, result, token)); + } + tasks.Add(PushTimeseries(variables, update.Variables, report, result, token)); - tasks.Add(PushReferences(references, report, result, token)); - if (fdmDestination != null) + + if (cdfWriter.Relationships != null) + { + tasks.Add(PushReferences(references, report, result, token)); + } + + if (cdfWriter.FDM != null) { tasks.Add(PushFdm(objects, variables, references, result, token)); } @@ -389,7 +391,7 @@ private async Task PushFdm(IEnumerable objects, IEnumerable assetsMap, Ty { try { - var _result = await cdfWriter.Assets.PushNodes(Extractor, assetsMap, nodeToAssetIds, update, token); + var _result = await cdfWriter.Assets!.PushNodes(Extractor, assetsMap, nodeToAssetIds, update, token); report.AssetsCreated += _result.Created; report.AssetsUpdated += _result.Updated; } @@ -689,7 +691,7 @@ CancellationToken token { try { - var _result = await cdfWriter.Raw.PushNodes( + var _result = await cdfWriter.Raw!.PushNodes( Extractor, RawMetadataTargetConfig!.Database!, RawMetadataTargetConfig!.AssetsTable!, @@ -804,9 +806,8 @@ private async Task PushCleanTimeseries(IDictionary timeserie { try { + var _result = await cdfWriter.Timeseries!.PushVariables(Extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, update, token); var createMinimal = !(CleanMetadataTargetConfig?.Timeseries ?? false); - var writer = createMinimal ? 
cdfWriter.MinimalTimeseries : cdfWriter.Timeseries; - var _result = await writer.PushVariables(Extractor, timeseriesMap, nodeToAssetIds, mismatchedTimeseries, update, token); if (createMinimal) { report.MinimalTimeSeriesCreated += _result.Created; @@ -840,7 +841,7 @@ private async Task PushRawTimeseries(ConcurrentDictionary ts .Where(kvp => kvp.Value.Source != NodeSource.CDF) .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); - var _result = await cdfWriter.Raw.PushNodes( + var _result = await cdfWriter.Raw!.PushNodes( Extractor, RawMetadataTargetConfig!.Database!, RawMetadataTargetConfig!.TimeseriesTable!, @@ -1012,7 +1013,7 @@ private async Task PushCleanReferences(IEnumerable relations { try { - var _result = await cdfWriter.Relationships.PushReferences(relationships, token); + var _result = await cdfWriter.Relationships!.PushReferences(relationships, token); report.RelationshipsCreated += _result.Created; } catch (Exception e) @@ -1035,7 +1036,7 @@ private async Task PushRawReferences(IEnumerable relationshi { try { - var _result = await cdfWriter.Raw.PushReferences(RawMetadataTargetConfig!.Database!, RawMetadataTargetConfig!.RelationshipsTable!, relationships, token); + var _result = await cdfWriter.Raw!.PushReferences(RawMetadataTargetConfig!.Database!, RawMetadataTargetConfig!.RelationshipsTable!, relationships, token); report.RawRelationshipsCreated += _result.Created; } catch (Exception e) { diff --git a/Extractor/Pushers/FDM/FDMWriter.cs b/Extractor/Pushers/FDM/FDMWriter.cs index 68a82fcc..9836b2f3 100644 --- a/Extractor/Pushers/FDM/FDMWriter.cs +++ b/Extractor/Pushers/FDM/FDMWriter.cs @@ -34,7 +34,7 @@ You should have received a copy of the GNU General Public License namespace Cognite.OpcUa.Pushers.FDM { - internal class FDMWriter + public class FDMWriter { private CogniteDestination destination; private FullConfig config; diff --git a/Extractor/Pushers/Writers/BaseTimeseriesWriter.cs b/Extractor/Pushers/Writers/BaseTimeseriesWriter.cs index b599cfbd..530f315b 100644 --- a/Extractor/Pushers/Writers/BaseTimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/BaseTimeseriesWriter.cs @@ -16,13 +16,13 @@ namespace Cognite.OpcUa.Pushers.Writers { - public abstract class BaseTimeseriesWriter : ITimeseriesWriter + public abstract class BaseTimeseriesWriter : ITimeseriesWriter { - protected readonly ILogger logger; + protected readonly ILogger logger; protected readonly FullConfig config; protected readonly CogniteDestination destination; - public BaseTimeseriesWriter(ILogger logger, CogniteDestination destination, FullConfig config) + public BaseTimeseriesWriter(ILogger logger, CogniteDestination destination, FullConfig config) { this.logger = logger; this.config = config; diff --git a/Extractor/Pushers/Writers/CDFWriter.cs b/Extractor/Pushers/Writers/CDFWriter.cs index 2a35d38e..88afa617 100644 --- a/Extractor/Pushers/Writers/CDFWriter.cs +++ b/Extractor/Pushers/Writers/CDFWriter.cs @@ -1,28 +1,29 @@ +using Cognite.OpcUa.Pushers.FDM; using Cognite.OpcUa.Pushers.Writers.Interfaces; namespace Cognite.OpcUa.Pushers.Writers { public class CDFWriter : ICDFWriter { - public IRawWriter Raw { get; } + public IRawWriter? Raw { get; } public ITimeseriesWriter Timeseries { get; } - public IAssetsWriter Assets { get; } - public IRelationshipsWriter Relationships{ get; } - public ITimeseriesWriter MinimalTimeseries { get; } + public IAssetsWriter? Assets { get; } + public IRelationshipsWriter? Relationships { get; } + public FDMWriter? 
FDM { get; } public CDFWriter( - IRawWriter rawWriter, ITimeseriesWriter timeseriesWriter, - IAssetsWriter assetsWriter, - IRelationshipsWriter relationshipsWriter, - ITimeseriesWriter minimalTimeSeriesWriter + IRawWriter? rawWriter, + IAssetsWriter? assetsWriter, + IRelationshipsWriter? relationshipsWriter, + FDMWriter? fdmWriter ) { - this.Raw = rawWriter; - this.Timeseries = timeseriesWriter; - this.Assets = assetsWriter; - this.Relationships = relationshipsWriter; - this.MinimalTimeseries = minimalTimeSeriesWriter; + Raw = rawWriter; + Timeseries = timeseriesWriter; + Assets = assetsWriter; + Relationships = relationshipsWriter; + FDM = fdmWriter; } } } diff --git a/Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs b/Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs index 62e1e189..f0d5ed64 100644 --- a/Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs +++ b/Extractor/Pushers/Writers/Interfaces/ICDFWriter.cs @@ -1,11 +1,13 @@ +using Cognite.OpcUa.Pushers.FDM; + namespace Cognite.OpcUa.Pushers.Writers.Interfaces { public interface ICDFWriter { - IRawWriter Raw { get; } + IRawWriter? Raw { get; } ITimeseriesWriter Timeseries { get; } - ITimeseriesWriter MinimalTimeseries { get; } - IAssetsWriter Assets { get; } - IRelationshipsWriter Relationships { get; } + IAssetsWriter? Assets { get; } + IRelationshipsWriter? Relationships { get; } + FDMWriter? FDM { get; } } } diff --git a/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs b/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs index 189c53c2..3b983b51 100644 --- a/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/MinimalTimeseriesWriter.cs @@ -29,7 +29,7 @@ You should have received a copy of the GNU General Public License namespace Cognite.OpcUa.Pushers.Writers { - public class MinimalTimeseriesWriter : BaseTimeseriesWriter + public class MinimalTimeseriesWriter : BaseTimeseriesWriter { public MinimalTimeseriesWriter( ILogger logger, @@ -44,8 +44,8 @@ protected override IEnumerable BuildTimeseries(IDictionary tsMap[id]); var creates = tss.Select(ts => ts.ToMinimalTimeseries(extractor, config.Cognite?.DataSet?.Id)) .Where(ts => ts != null); - result.Created += creates.Count(); - return creates; + result.Created += creates.Count(); + return creates; } protected override Task UpdateTimeseries(UAExtractor extractor, IDictionary tsMap, diff --git a/Extractor/Pushers/Writers/TimeseriesWriter.cs b/Extractor/Pushers/Writers/TimeseriesWriter.cs index 3ca94ab2..b20cfe51 100644 --- a/Extractor/Pushers/Writers/TimeseriesWriter.cs +++ b/Extractor/Pushers/Writers/TimeseriesWriter.cs @@ -30,7 +30,7 @@ You should have received a copy of the GNU General Public License namespace Cognite.OpcUa.Pushers.Writers { - public class TimeseriesWriter : BaseTimeseriesWriter + public class TimeseriesWriter : BaseTimeseriesWriter { public TimeseriesWriter(ILogger logger, CogniteDestination destination, FullConfig config) : base(logger, destination, config) diff --git a/Extractor/Pushers/Writers/WriterUtils.cs b/Extractor/Pushers/Writers/WriterUtils.cs index de80085f..5e056c08 100644 --- a/Extractor/Pushers/Writers/WriterUtils.cs +++ b/Extractor/Pushers/Writers/WriterUtils.cs @@ -1,6 +1,7 @@ using System.Threading; using Cognite.Extractor.Utils; using Cognite.OpcUa.Config; +using Cognite.OpcUa.Pushers.FDM; using Cognite.OpcUa.Pushers.Writers.Interfaces; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; @@ -9,34 +10,64 @@ namespace Cognite.OpcUa.Pushers.Writers { public static class 
WriterUtils { - public static void AddWriters(this IServiceCollection services, CancellationToken token) + public static void AddWriters(this IServiceCollection services, CancellationToken token, FullConfig config) { - services.AddSingleton(provider => - { - var dest = provider.GetRequiredService(); + services.AddSingleton(provider => { + var destination = provider.GetRequiredService(); var config = provider.GetRequiredService(); - return new CDFWriter( - new RawWriter(provider.GetRequiredService>(), dest, config), - new TimeseriesWriter( - provider.GetRequiredService>(), - dest, - config - ), - new AssetsWriter( + return (config.Cognite?.MetadataTargets?.Clean?.Timeseries ?? false) + ? new TimeseriesWriter(provider.GetRequiredService>(), destination, config) + : new MinimalTimeseriesWriter(provider.GetRequiredService>(), destination, config); + }); + if (config.Cognite?.MetadataTargets?.Clean?.Assets ?? false) + { + services.AddSingleton(provider => { + var destination = provider.GetRequiredService(); + return new AssetsWriter( provider.GetRequiredService>(), - dest, + destination, config - ), - new RelationshipsWriter( + ); + }); + } + if (config.Cognite?.MetadataTargets?.Clean?.Assets ?? false) + { + services.AddSingleton(provider => { + var destination = provider.GetRequiredService(); + return new RelationshipsWriter( provider.GetRequiredService>(), - dest, + destination, config - ), - new MinimalTimeseriesWriter( - provider.GetRequiredService>(), - dest, + ); + }); + } + if (config.Cognite?.MetadataTargets?.Raw is not null) + { + services.AddSingleton(provider => { + var destination = provider.GetRequiredService(); + return new RawWriter( + provider.GetRequiredService>(), + destination, config - ) + ); + }); + } + if (config.Cognite?.MetadataTargets?.DataModels != null && (config.Cognite?.MetadataTargets?.DataModels.Enabled ?? false)) + { + services.AddSingleton(provider => { + var destination = provider.GetRequiredService(); + return new FDMWriter(provider.GetRequiredService(), destination, + provider.GetRequiredService>()); + }); + } + services.AddSingleton(provider => + { + return new CDFWriter( + provider.GetRequiredService(), + provider.GetService(), + provider.GetService(), + provider.GetService(), + provider.GetService() ); }); } diff --git a/ExtractorLauncher/ExtractorStarter.cs b/ExtractorLauncher/ExtractorStarter.cs index 2e2cc1fe..1d00259a 100644 --- a/ExtractorLauncher/ExtractorStarter.cs +++ b/ExtractorLauncher/ExtractorStarter.cs @@ -365,7 +365,7 @@ public static async Task RunExtractor(ILogger? 
log, ExtractorParams setup, Servi }); services.AddSingleton(); - services.AddWriters(token); + services.AddWriters(token, config!); var options = new ExtractorRunnerParams { diff --git a/Test/Unit/CDFPusherTest.cs index 2a44ffd5..92495f4b 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -815,7 +815,7 @@ await pusher.PushNodes(Enumerable.Empty(), Enumerable.Empty(); var pusher = new CDFPusher(Provider.GetRequiredService>(), From 7e6fb9510e2ea6110a179825db5823ce375d1d3f Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Tue, 25 Jul 2023 14:07:29 +0200 Subject: [PATCH 17/26] docs: update use of metadata targets --- Extractor/Config/CogniteConfig.cs | 6 +++--- ExtractorLauncher/ExtractorStarter.cs | 19 ++++++++++++------- schema/cognite_config.schema.json | 30 ------------------------------ 3 files changed, 15 insertions(+), 40 deletions(-) diff --git a/Extractor/Config/CogniteConfig.cs index 0551cadb..0c11320c 100644 --- a/Extractor/Config/CogniteConfig.cs +++ b/Extractor/Config/CogniteConfig.cs @@ -169,9 +169,9 @@ public class RawMetadataTargetConfig } public class CleanMetadataTargetConfig { - public bool Assets { get; set; } = true; - public bool Timeseries { get; set; } = true; - public bool Relationships { get; set; } = true; + public bool Assets { get; set; } + public bool Timeseries { get; set; } + public bool Relationships { get; set; } } public class MetadataMapConfig { diff --git a/ExtractorLauncher/ExtractorStarter.cs index 1d00259a..6b1c4447 100644 --- a/ExtractorLauncher/ExtractorStarter.cs +++ b/ExtractorLauncher/ExtractorStarter.cs @@ -155,14 +155,19 @@ public static class ExtractorStarter } }; } - if (config.Cognite?.FlexibleDataModels != null) + else if (config.Cognite?.MetadataTargets == null) { - log.LogWarning("cognite.flexible-data-models is deprecated. Use cognite.metadata-targets.flexible-data-models instead"); - - if (config.Cognite == null) config.Cognite = new CognitePusherConfig(); - if (config.Cognite.MetadataTargets == null) config.Cognite.MetadataTargets = new MetadataTargetsConfig(); - if (config.Cognite.MetadataTargets.DataModels == null) { - config.Cognite.MetadataTargets.DataModels = config.Cognite.FlexibleDataModels; + if (config.Cognite?.SkipMetadata ?? false) + { + log.LogWarning("Use of skip-metadata has been deprecated. Use cognite.metadata-targets instead"); + } + else + { + log.LogWarning("Default writing to clean is deprecated; in the future, not setting a metadata target will not write metadata to CDF at all"); + if (config.Cognite == null) config.Cognite = new CognitePusherConfig(); + if (config.Cognite.MetadataTargets == null) config.Cognite.MetadataTargets = new MetadataTargetsConfig(); + if (config.Cognite.MetadataTargets.Clean == null) config.Cognite.MetadataTargets.Clean = new CleanMetadataTargetConfig(); + config.Cognite.MetadataTargets.Clean.Timeseries = true; } } #pragma warning restore 0618 diff --git a/schema/cognite_config.schema.json index 988c647f..65662879 100644 --- a/schema/cognite_config.schema.json +++ b/schema/cognite_config.schema.json @@ -29,36 +29,6 @@ "description": "Whether to read start/end-points on startup, where possible.
At least one pusher should be able to do this, or `state-store` should be enabled, otherwise back/frontfill will run for the entire history every restart", "default": true }, - "skip-metadata": { - "type": "boolean", - "deprecated": true, - "description": "Do not push any metadata at all. If this is `true`, plain timeseries without metadata will be created, similarly to `raw-metadata`, and datapoints will be pushed. Nothing will be written to raw, band no assets will be created. Events will be created, but without being contextualized to assets" - }, - "raw-metadata": { - "type": "object", - "description": "Store assets/timeseries metadata and relationships in raw. Assets will not be created at all, timeseries will be created with just `externalId`, `isStep`, and `isString`. Both timeseries and assets will be persisted in their entirity to CDF Raw. Datapoints are not affected, events will be created but without being contextualized to assets. The externalId of the source node is added to metadata if applicable", - "unevaluatedProperties": false, - "deprecated": true, - "required": [ "database" ], - "properties": { - "database": { - "type": "string", - "description": "Raw database" - }, - "assets-table": { - "type": "string", - "description": "Raw table to use for assets" - }, - "timeseries-table": { - "type": "string", - "description": "Raw table to use for timeseries" - }, - "relationships-table": { - "type": "string", - "description": "Raw table to use for relationships" - } - } - }, "metadata-targets": { "type": "object", "description": "Metadata targets for objects, variable and references in CDF.", From bf4f439d297d0d382bca6021958863c53a1c0ac6 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Tue, 25 Jul 2023 20:30:36 +0200 Subject: [PATCH 18/26] fix: failing tests --- Extractor/Pushers/CDFPusher.cs | 31 +++++++---------- Extractor/Pushers/Writers/WriterUtils.cs | 2 +- ExtractorLauncher/ExtractorStarter.cs | 13 +++++++ Test/Integration/NodeExtractionTests.cs | 17 +++++----- Test/Unit/CDFPusherTest.cs | 43 ++++++++++++++---------- Test/Unit/DeleteTest.cs | 24 +++++++------ 6 files changed, 72 insertions(+), 58 deletions(-) diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index 4bebf68b..0339b284 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -341,22 +341,13 @@ public async Task PushNodes(IEnumerable objects, var tasks = new List(); - if (cdfWriter.Assets != null) - { - tasks.Add(PushAssets(objects, update.Objects, report, result, token)); - } + tasks.Add(PushAssets(objects, update.Objects, report, result, token)); tasks.Add(PushTimeseries(variables, update.Variables, report, result, token)); - if (cdfWriter.Relationships != null) - { - tasks.Add(PushReferences(references, report, result, token)); - } + tasks.Add(PushReferences(references, report, result, token)); - if (cdfWriter.FDM != null) - { - tasks.Add(PushFdm(objects, variables, references, result, token)); - } + tasks.Add(PushFdm(objects, variables, references, result, token)); await Task.WhenAll(tasks); @@ -388,6 +379,7 @@ public async Task PushNodes(IEnumerable objects, /// Task private async Task PushFdm(IEnumerable objects, IEnumerable variables, IEnumerable references, PushResult result, CancellationToken token) { + if (cdfWriter.FDM == null) return; bool pushResult = true; try { @@ -640,10 +632,10 @@ private ConcurrentDictionary MapAssets(IEnumerableTask private async Task PushAssets(IEnumerable objects, TypeUpdateConfig update, BrowseReport report, 
PushResult result, CancellationToken token) { - if (!objects.Any()) return; + if (!objects.Any() && cdfWriter.Assets == null && cdfWriter.Raw == null) return; var assetsMap = MapAssets(objects); - if (CleanMetadataTargetConfig?.Assets ?? false) + if (CleanMetadataTargetConfig?.Assets ?? false && cdfWriter.Assets != null) { await PushCleanAssets(assetsMap, update, report, result, token); } @@ -675,7 +667,7 @@ private async Task PushCleanAssets(IDictionary assetsMap, Ty result.Objects = false; } } - + /// /// Master method for pushing assets to CDF raw. /// @@ -787,7 +779,7 @@ private async Task PushTimeseries(IEnumerable variables, TypeUpdateC var timeseriesMap = MapTimeseries(variables); await PushCleanTimeseries(timeseriesMap, update, report, result, token); - if ((RawMetadataTargetConfig?.Database != null) && (RawMetadataTargetConfig?.TimeseriesTable != null)) + if (cdfWriter.Raw != null && (RawMetadataTargetConfig?.TimeseriesTable != null)) { await PushRawTimeseries(timeseriesMap, update, report, result, token); } @@ -983,18 +975,18 @@ await destination.InsertRawRowsAsync( /// Task private async Task PushReferences(IEnumerable references, BrowseReport report, PushResult result, CancellationToken token) { - if (!references.Any()) return; + if (!references.Any() && cdfWriter.Timeseries == null && cdfWriter.Raw == null) return; var relationships = references .Select(reference => reference.ToRelationship(config.DataSet?.Id, Extractor)) .DistinctBy(rel => rel.ExternalId); - if (CleanMetadataTargetConfig?.Relationships ?? false) + if (cdfWriter.Relationships != null) { await PushCleanReferences(relationships, report, result, token); } - if (RawMetadataTargetConfig?.Database != null && RawMetadataTargetConfig?.RelationshipsTable != null) + if (cdfWriter.Raw != null && RawMetadataTargetConfig?.RelationshipsTable != null) { await PushRawReferences(relationships, report, result, token); } @@ -1107,3 +1099,4 @@ private async Task EnsureConfigInit(CancellationToken token) public void Dispose() { } } } + diff --git a/Extractor/Pushers/Writers/WriterUtils.cs b/Extractor/Pushers/Writers/WriterUtils.cs index 5e056c08..4e0dbcbb 100644 --- a/Extractor/Pushers/Writers/WriterUtils.cs +++ b/Extractor/Pushers/Writers/WriterUtils.cs @@ -52,7 +52,7 @@ public static void AddWriters(this IServiceCollection services, CancellationToke ); }); } - if (config.Cognite?.MetadataTargets?.DataModels != null && (config.Cognite?.MetadataTargets?.DataModels.Enabled ?? 
false)) + if (config.Cognite?.MetadataTargets?.DataModels != null && config.Cognite.MetadataTargets.DataModels.Enabled) { services.AddSingleton(provider => { var destination = provider.GetRequiredService(); diff --git a/ExtractorLauncher/ExtractorStarter.cs index 6b1c4447..4e5faa05 100644 --- a/ExtractorLauncher/ExtractorStarter.cs +++ b/ExtractorLauncher/ExtractorStarter.cs @@ -172,6 +172,19 @@ public static class ExtractorStarter } #pragma warning restore 0618 + if (config.Cognite?.MetadataTargets?.Raw != null) + { + var rawMetaTarget = config.Cognite.MetadataTargets.Raw; + if (rawMetaTarget.Database == null) + { + return "cognite.metadata-targets.raw.database is required when setting raw"; + } + if (rawMetaTarget.AssetsTable == null || rawMetaTarget.RelationshipsTable == null || rawMetaTarget.TimeseriesTable == null) + { + return "At least one of assets-table, relationships-table or timeseries-table is required when setting cognite.metadata-targets.raw"; + } + } + return null; } diff --git a/Test/Integration/NodeExtractionTests.cs index 2cadd92d..bbb88206 100644 --- a/Test/Integration/NodeExtractionTests.cs +++ b/Test/Integration/NodeExtractionTests.cs @@ -864,9 +864,6 @@ public async Task TestUpdateFields( bool assetContext, bool variableContext, bool assetMeta, bool variableMeta) { - var (handler, pusher) = tester.GetCDFPusher(); - using var extractor = tester.BuildExtractor(true, null, pusher); - tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { Clean = new CleanMetadataTargetConfig @@ -875,6 +872,9 @@ public async Task TestUpdateFields( Timeseries = true } }; + var (handler, pusher) = tester.GetCDFPusher(); + using var extractor = tester.BuildExtractor(true, null, pusher); + var upd = tester.Config.Extraction.Update; upd.Objects.Name = assetName; upd.Objects.Description = assetDesc; @@ -919,9 +919,6 @@ public async Task TestUpdateFields( // [InlineData(true, true)] public async Task TestUpdateFieldsRaw(bool assets, bool timeseries) { - var (handler, pusher) = tester.GetCDFPusher(); - using var extractor = tester.BuildExtractor(true, null, pusher); - var upd = tester.Config.Extraction.Update; upd.Objects.Name = assets; upd.Objects.Description = assets; @@ -949,6 +946,8 @@ public async Task TestUpdateFieldsRaw(bool assets, bool timeseries) TimeseriesTable = "timeseries" } }; + var (handler, pusher) = tester.GetCDFPusher(); + using var extractor = tester.BuildExtractor(true, null, pusher); tester.Config.Extraction.DataTypes.AllowStringVariables = true; tester.Config.Extraction.DataTypes.MaxArraySize = 4; @@ -993,9 +992,6 @@ public async Task TestUpdateNullPropertyValue() [Fact] public async Task TestUpdateNullPropertyValue() { - var (handler, pusher) = tester.GetCDFPusher(); - using var extractor = tester.BuildExtractor(true, null, pusher); - tester.Config.Extraction.RootNode = CommonTestUtils.ToProtoNodeId(tester.Server.Ids.Wrong.Root, tester.Client); tester.Config.Extraction.DataTypes.MaxArraySize = 4; @@ -1018,6 +1014,9 @@ public async Task TestUpdateNullPropertyValue() Timeseries = true } }; + var (handler, pusher) = tester.GetCDFPusher(); + using var extractor = tester.BuildExtractor(true, null, pusher); + tester.Server.Server.MutateNode(tester.Server.Ids.Wrong.TooLargeProp, state => { diff --git a/Test/Unit/CDFPusherTest.cs index 92495f4b..a95e9a43 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -274,15
+274,18 @@ public async Task TestPushEvents() [Fact] public async Task TestCreateUpdateAssets() { - using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { Clean = new CleanMetadataTargetConfig { - Assets = true + Assets = true, + Timeseries = false, + Relationships = true, } }; + (handler, pusher) = tester.GetCDFPusher(); + using var extractor = tester.BuildExtractor(true, null, pusher); var rels = Enumerable.Empty(); var tss = Enumerable.Empty(); @@ -340,23 +343,19 @@ public async Task TestCreateUpdateAssets() [Fact] public async Task TestCreateRawAssets() { - using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - Clean = new CleanMetadataTargetConfig - { - Relationships = true, - Assets = false, - Timeseries = true - }, + Clean = new CleanMetadataTargetConfig(), Raw = new RawMetadataTargetConfig { Database = "metadata", AssetsTable = "assets" } }; + (handler, pusher) = tester.GetCDFPusher(); + using var extractor = tester.BuildExtractor(true, null, pusher); var node = new UAObject(tester.Server.Ids.Base.Root, "BaseRoot", null, null, NodeId.Null, null); var rels = Enumerable.Empty(); @@ -366,17 +365,17 @@ public async Task TestCreateRawAssets() handler.FailedRoutes.Add("/raw/dbs/metadata/tables/assets/rows"); Assert.False((await pusher.PushNodes(new[] { node }, tss, rels, update, tester.Source.Token)).RawObjects); Assert.Empty(handler.AssetsRaw); + handler.FailedRoutes.Clear(); // Create one - handler.FailedRoutes.Clear(); - Assert.True((await pusher.PushNodes(new[] { node }, tss, rels, update, tester.Source.Token)).Objects); + Assert.True((await pusher.PushNodes(new[] { node }, tss, rels, update, tester.Source.Token)).RawObjects); Assert.Single(handler.AssetsRaw); Assert.Equal("BaseRoot", handler.AssetsRaw.First().Value.GetProperty("name").GetString()); // Create another, do not overwrite the existing one, due to no update settings var node2 = new UAObject(tester.Server.Ids.Custom.Root, "CustomRoot", null, null, NodeId.Null, null); node.Attributes.Description = "description"; - Assert.True((await pusher.PushNodes(new[] { node, node2 }, tss, rels, update, tester.Source.Token)).Objects); + Assert.True((await pusher.PushNodes(new[] { node, node2 }, tss, rels, update, tester.Source.Token)).RawObjects); Assert.Equal(2, handler.AssetsRaw.Count); Assert.Null(handler.AssetsRaw.First().Value.GetProperty("description").GetString()); Assert.Null(handler.AssetsRaw.Last().Value.GetProperty("description").GetString()); @@ -386,7 +385,6 @@ public async Task TestCreateRawAssets() [Fact] public async Task TestUpdateRawAssets() { - using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig @@ -404,6 +402,8 @@ public async Task TestUpdateRawAssets() TimeseriesTable = "timeseries" } }; + (handler, pusher) = tester.GetCDFPusher(); + using var extractor = tester.BuildExtractor(true, null, pusher); var node = new UAObject(tester.Server.Ids.Base.Root, "BaseRoot", null, null, NodeId.Null, null); var rels = Enumerable.Empty(); var tss = Enumerable.Empty(); @@ -436,7 +436,6 @@ public async Task TestUpdateRawAssets() [Fact] public async Task TestCreateUpdateTimeseries() { - using var 
extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { @@ -447,6 +446,8 @@ public async Task TestCreateUpdateTimeseries() Timeseries = true }, }; + (handler, pusher) = tester.GetCDFPusher(); + using var extractor = tester.BuildExtractor(true, null, pusher); var dt = new UADataType(DataTypeIds.Double); @@ -515,7 +516,6 @@ public async Task TestCreateUpdateTimeseries() [Fact] public async Task TestCreateRawTimeseries() { - using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig @@ -533,6 +533,8 @@ public async Task TestCreateRawTimeseries() TimeseriesTable = "timeseries" } }; + (handler, pusher) = tester.GetCDFPusher(); + using var extractor = tester.BuildExtractor(true, null, pusher); var dt = new UADataType(DataTypeIds.Double); @@ -567,7 +569,6 @@ public async Task TestCreateRawTimeseries() [Fact] public async Task TestUpdateRawTimeseries() { - using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig @@ -586,6 +587,8 @@ public async Task TestUpdateRawTimeseries() } }; + (handler, pusher) = tester.GetCDFPusher(); + using var extractor = tester.BuildExtractor(true, null, pusher); var dt = new UADataType(DataTypeIds.Double); var nodeToAssetIds = (Dictionary)pusher.GetType() @@ -634,8 +637,6 @@ public async Task TestNodeCallback() ReportOnEmpty = true }; - (handler, pusher) = tester.GetCDFPusher(); - using var extractor = tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { @@ -647,6 +648,8 @@ public async Task TestNodeCallback() }, }; + (handler, pusher) = tester.GetCDFPusher(); + using var extractor = tester.BuildExtractor(true, null, pusher); var dt = new UADataType(DataTypes.Double); @@ -1215,6 +1218,7 @@ public async Task TestCDFAsSourceData() tester.Config.Extraction.RootNode = tester.Ids.Custom.Root.ToProtoNodeId(tester.Client); tester.Config.History.Enabled = true; + (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); // Nothing in CDF @@ -1292,6 +1296,7 @@ public async Task TestCDFAsSourceEvents() tester.Config.Subscriptions.DataPoints = true; tester.Config.Extraction.RootNode = tester.Ids.Event.Root.ToProtoNodeId(tester.Client); + (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); // Nothing in CDF @@ -1357,6 +1362,7 @@ public async Task TestCDFNodeSetBackground() { Relationships = true, Assets = false, + Timeseries = true, }, Raw = new RawMetadataTargetConfig { @@ -1372,6 +1378,7 @@ public async Task TestCDFNodeSetBackground() tester.Config.Subscriptions.DataPoints = true; tester.Config.Extraction.RootNode = tester.Ids.Event.Root.ToProtoNodeId(tester.Client); tester.Config.Source.AltSourceBackgroundBrowse = true; + (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(true, null, pusher); diff --git a/Test/Unit/DeleteTest.cs b/Test/Unit/DeleteTest.cs index 22ad7a1f..00cb6643 100644 --- a/Test/Unit/DeleteTest.cs +++ b/Test/Unit/DeleteTest.cs @@ -376,29 +376,29 @@ public async Task TestFullRunDelete() 
[Fact] public async Task TestCDFDelete() { - var (handler, pusher) = tester.GetCDFPusher(); tester.Config.Extraction.Deletes.Enabled = true; tester.Config.Extraction.RootNode = tester.Ids.Audit.Root.ToProtoNodeId(tester.Client); tester.Config.Extraction.Relationships.Enabled = true; tester.Config.Extraction.Relationships.Hierarchical = true; tester.Config.Cognite.DeleteRelationships = true; + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + Clean = new CleanMetadataTargetConfig + { + Assets = true, + Timeseries = true, + Relationships = true + } + }; using var stateStore = new MockStateStore(); + var (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(pushers: pusher, stateStore: stateStore); var addedId = tester.Server.Server.AddObject(tester.Ids.Audit.Root, "NodeToDelete"); var addedVarId = tester.Server.Server.AddVariable(tester.Ids.Audit.Root, "VariableToDelete", DataTypeIds.Double); var addedExtId = tester.Client.GetUniqueId(addedId); var addedVarExtId = tester.Client.GetUniqueId(addedVarId); - - tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig - { - Clean = new CleanMetadataTargetConfig - { - Assets = true, - // Timeseries = true - } - }; // Run the extractor and verify that we got the node. await extractor.RunExtractor(true); Assert.True(handler.Assets.ContainsKey(addedExtId)); @@ -429,7 +429,6 @@ public async Task TestCDFDelete() [Fact] public async Task TestCDFDeleteRaw() { - var (handler, pusher) = tester.GetCDFPusher(); tester.Config.Extraction.Deletes.Enabled = true; tester.Config.Extraction.RootNode = tester.Ids.Audit.Root.ToProtoNodeId(tester.Client); tester.Config.Extraction.Relationships.Enabled = true; @@ -440,6 +439,8 @@ public async Task TestCDFDeleteRaw() Clean = new CleanMetadataTargetConfig { Timeseries = true, + Assets = true, + Relationships = true }, Raw = new RawMetadataTargetConfig { @@ -450,6 +451,7 @@ public async Task TestCDFDeleteRaw() } }; using var stateStore = new MockStateStore(); + var (handler, pusher) = tester.GetCDFPusher(); using var extractor = tester.BuildExtractor(pushers: pusher, stateStore: stateStore); From fd95145c317e999e97c39fbbdd3705c07b8967aa Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Tue, 25 Jul 2023 21:03:28 +0200 Subject: [PATCH 19/26] fix: failing test --- Extractor/Pushers/CDFPusher.cs | 2 +- Extractor/Pushers/Writers/WriterUtils.cs | 2 +- Test/Integration/LauncherTests.cs | 2 +- Test/Unit/CDFPusherTest.cs | 54 ++++++++++++------------ 4 files changed, 29 insertions(+), 31 deletions(-) diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index 0339b284..cc61e2c8 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -975,7 +975,7 @@ await destination.InsertRawRowsAsync( /// Task private async Task PushReferences(IEnumerable references, BrowseReport report, PushResult result, CancellationToken token) { - if (!references.Any() && cdfWriter.Timeseries == null && cdfWriter.Raw == null) return; + if (!references.Any() && cdfWriter.Relationships == null && cdfWriter.Raw == null) return; var relationships = references .Select(reference => reference.ToRelationship(config.DataSet?.Id, Extractor)) diff --git a/Extractor/Pushers/Writers/WriterUtils.cs b/Extractor/Pushers/Writers/WriterUtils.cs index 4e0dbcbb..e4db3ff3 100644 --- a/Extractor/Pushers/Writers/WriterUtils.cs +++ b/Extractor/Pushers/Writers/WriterUtils.cs @@ -30,7 +30,7 @@ public static void AddWriters(this IServiceCollection services, 
CancellationToke ); }); } - if (config.Cognite?.MetadataTargets?.Clean?.Assets ?? false) + if (config.Cognite?.MetadataTargets?.Clean?.Relationships ?? false) { services.AddSingleton(provider => { var destination = provider.GetRequiredService(); diff --git a/Test/Integration/LauncherTests.cs b/Test/Integration/LauncherTests.cs index 4ac2242a..ed142a48 100644 --- a/Test/Integration/LauncherTests.cs +++ b/Test/Integration/LauncherTests.cs @@ -480,7 +480,7 @@ public void TestVerifyAndBuildConfig() log.Events.Clear(); config.Source.EndpointUrl = tester.EndpointUrl; method.Invoke(typeof(ExtractorStarter), new object[] { log, config, setup, options, "config" }); - Assert.Equal(2, log.Events.Where(evt => evt.LogLevel == Microsoft.Extensions.Logging.LogLevel.Warning).Count()); + Assert.Equal(3, log.Events.Where(evt => evt.LogLevel == Microsoft.Extensions.Logging.LogLevel.Warning).Count()); // events idprefix config.Extraction.IdPrefix = "events."; diff --git a/Test/Unit/CDFPusherTest.cs b/Test/Unit/CDFPusherTest.cs index a95e9a43..3a15e2da 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -737,10 +737,6 @@ public async Task TestRawNodeCallback() tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - Clean = new CleanMetadataTargetConfig - { - Timeseries = false, - }, Raw = new RawMetadataTargetConfig { Database = "metadata", @@ -762,12 +758,12 @@ await pusher.PushNodes(Enumerable.Empty(), Enumerable.Empty(), Enumerable.Empty(), Enumerable.Empty(), Enumerable.Empty(); var tss = Enumerable.Empty(); @@ -962,9 +960,9 @@ public async Task TestCreateRelationships() handler.FailedRoutes.Add("/relationships"); Assert.False((await pusher.PushNodes(assets, tss, references, update, tester.Source.Token)).References); Assert.Empty(handler.Relationships); + handler.FailedRoutes.Clear(); // Push successful - handler.FailedRoutes.Clear(); Assert.True((await pusher.PushNodes(assets, tss, references, update, tester.Source.Token)).References); Assert.Equal(2, handler.Relationships.Count); From e52f1a1fb12ad740d40a67c549253498f84f581d Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Wed, 26 Jul 2023 10:29:36 +0200 Subject: [PATCH 20/26] test: test all pusher destination --- Extractor/Pushers/CDFPusher.cs | 21 ++++++++--- Test/Unit/CDFPusherTest.cs | 66 ++++++++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+), 5 deletions(-) diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index cc61e2c8..a54129ad 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -335,6 +335,9 @@ public async Task PushNodes(IEnumerable objects, result.Objects = false; result.References = false; result.Variables = false; + result.RawObjects = false; + result.RawReferences = false; + result.RawVariables = false; nodeEnsuringFailures.Inc(); return result; } @@ -353,7 +356,14 @@ public async Task PushNodes(IEnumerable objects, log.LogInformation("Finish pushing nodes to CDF"); - if (result.Objects && result.References && result.Variables && result.RawObjects && result.RawVariables && result.RawReferences) + if ( + result.Objects + && result.References + && result.Variables + && result.RawObjects + && result.RawVariables + && result.RawReferences + ) { if (callback != null) { @@ -377,13 +387,14 @@ public async Task PushNodes(IEnumerable objects, /// Push result /// Cancellation token /// Task - private async Task PushFdm(IEnumerable objects, IEnumerable variables, IEnumerable references, PushResult result, CancellationToken token) + 
private async Task PushFdm(IEnumerable objects, + IEnumerable variables, IEnumerable references, PushResult result, CancellationToken token) { if (cdfWriter.FDM == null) return; bool pushResult = true; try { - pushResult = await cdfWriter.FDM!.PushNodes(objects, variables, references, Extractor, token); + pushResult = await cdfWriter.FDM.PushNodes(objects, variables, references, Extractor, token); } catch { @@ -635,11 +646,11 @@ private async Task PushAssets(IEnumerable objects, TypeUpdateConfig if (!objects.Any() && cdfWriter.Assets == null && cdfWriter.Raw == null) return; var assetsMap = MapAssets(objects); - if (CleanMetadataTargetConfig?.Assets ?? false && cdfWriter.Assets != null) + if (cdfWriter.Assets != null) { await PushCleanAssets(assetsMap, update, report, result, token); } - if (RawMetadataTargetConfig?.Database != null && RawMetadataTargetConfig?.AssetsTable != null) + if (cdfWriter.Raw != null && RawMetadataTargetConfig?.AssetsTable != null) { await PushRawAssets(assetsMap, update, report, result, token); } diff --git a/Test/Unit/CDFPusherTest.cs b/Test/Unit/CDFPusherTest.cs index 3a15e2da..c030a614 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -1405,6 +1405,72 @@ public async Task TestCDFNodeSetBackground() await TestUtils.WaitForCondition(() => handler.TimeseriesRaw.Count > 0, 10); } + + [Fact] + public async Task TestAllDestinationsActive() + { + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + Clean = new CleanMetadataTargetConfig + { + Relationships = true, + Assets = true, + Timeseries = true, + }, + Raw = new RawMetadataTargetConfig + { + Database = "metadata", + TimeseriesTable = "timeseries", + AssetsTable = "assets", + RelationshipsTable = "relationships" + }, + }; + tester.Config.Extraction.RootNode = new ProtoNodeId + { + NamespaceUri = "http://opcfoundation.org/UA/", + NodeId = "i=86" + }; + tester.Config.Extraction.NodeTypes.AsNodes = true; + tester.Config.Extraction.Relationships.Enabled = true; + tester.Config.Extraction.Relationships.Hierarchical = true; + tester.Config.Extraction.DataTypes.AutoIdentifyTypes = true; + + (handler, pusher) = tester.GetCDFPusher(); + handler.Assets.Clear(); + handler.AssetsRaw.Clear(); + handler.Timeseries.Clear(); + handler.TimeseriesRaw.Clear(); + handler.Relationships.Clear(); + handler.RelationshipsRaw.Clear(); + var extractor = tester.BuildExtractor(clear: true, pushers: pusher); + + var update = new UpdateConfig(); + var dt = new UADataType(DataTypeIds.Double); + var node = new UAObject(tester.Server.Ids.Base.Root, "BaseRoot", null, null, NodeId.Null, null); + var variable = new UAVariable(tester.Server.Ids.Base.DoubleVar1, "Variable 1", null, null, new NodeId("parent"), null); + variable.FullAttributes.DataType = dt; + var rel = new UAReference(ReferenceTypeIds.Organizes, true, new NodeId("source"), new NodeId("target2"), true, false, false, extractor.TypeManager); + + var result = await pusher.PushNodes(new[] { node }, new [] { variable }, new[] { rel }, update, tester.Source.Token); + + Assert.True(result.Objects); + Assert.True(result.RawObjects); + + Assert.True(result.Variables); + Assert.True(result.RawVariables); + + Assert.True(result.References); + Assert.True(result.RawReferences); + + Assert.Single(handler.Assets); + Assert.Single(handler.AssetsRaw); + + Assert.Single(handler.Timeseries); + Assert.Single(handler.TimeseriesRaw); + + Assert.Single(handler.Relationships); + Assert.Single(handler.RelationshipsRaw); + } #endregion } } From 
0a1d40c18fdc1ae90d0a2a6c7a7d846865710989 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Wed, 26 Jul 2023 11:24:00 +0200 Subject: [PATCH 21/26] test: remove all clear from all destination test --- Test/Unit/CDFPusherTest.cs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/Test/Unit/CDFPusherTest.cs b/Test/Unit/CDFPusherTest.cs index c030a614..8331ea82 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -1436,12 +1436,6 @@ public async Task TestAllDestinationsActive() tester.Config.Extraction.DataTypes.AutoIdentifyTypes = true; (handler, pusher) = tester.GetCDFPusher(); - handler.Assets.Clear(); - handler.AssetsRaw.Clear(); - handler.Timeseries.Clear(); - handler.TimeseriesRaw.Clear(); - handler.Relationships.Clear(); - handler.RelationshipsRaw.Clear(); var extractor = tester.BuildExtractor(clear: true, pushers: pusher); var update = new UpdateConfig(); From 7deb336545b440467173c3b7ad9a939fb5fd540f Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Wed, 26 Jul 2023 12:17:09 +0200 Subject: [PATCH 22/26] test: without all destination --- Test/Unit/CDFPusherTest.cs | 102 ++++++++++++++++++------------------- 1 file changed, 51 insertions(+), 51 deletions(-) diff --git a/Test/Unit/CDFPusherTest.cs b/Test/Unit/CDFPusherTest.cs index 8331ea82..3c53162c 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -1406,65 +1406,65 @@ public async Task TestCDFNodeSetBackground() await TestUtils.WaitForCondition(() => handler.TimeseriesRaw.Count > 0, 10); } - [Fact] - public async Task TestAllDestinationsActive() - { - tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig - { - Clean = new CleanMetadataTargetConfig - { - Relationships = true, - Assets = true, - Timeseries = true, - }, - Raw = new RawMetadataTargetConfig - { - Database = "metadata", - TimeseriesTable = "timeseries", - AssetsTable = "assets", - RelationshipsTable = "relationships" - }, - }; - tester.Config.Extraction.RootNode = new ProtoNodeId - { - NamespaceUri = "http://opcfoundation.org/UA/", - NodeId = "i=86" - }; - tester.Config.Extraction.NodeTypes.AsNodes = true; - tester.Config.Extraction.Relationships.Enabled = true; - tester.Config.Extraction.Relationships.Hierarchical = true; - tester.Config.Extraction.DataTypes.AutoIdentifyTypes = true; - - (handler, pusher) = tester.GetCDFPusher(); - var extractor = tester.BuildExtractor(clear: true, pushers: pusher); + // [Fact] + // public async Task TestAllDestinationsActive() + // { + // tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + // { + // Clean = new CleanMetadataTargetConfig + // { + // Relationships = true, + // Assets = true, + // Timeseries = true, + // }, + // Raw = new RawMetadataTargetConfig + // { + // Database = "metadata", + // TimeseriesTable = "timeseries", + // AssetsTable = "assets", + // RelationshipsTable = "relationships" + // }, + // }; + // tester.Config.Extraction.RootNode = new ProtoNodeId + // { + // NamespaceUri = "http://opcfoundation.org/UA/", + // NodeId = "i=86" + // }; + // tester.Config.Extraction.NodeTypes.AsNodes = true; + // tester.Config.Extraction.Relationships.Enabled = true; + // tester.Config.Extraction.Relationships.Hierarchical = true; + // tester.Config.Extraction.DataTypes.AutoIdentifyTypes = true; + + // (handler, pusher) = tester.GetCDFPusher(); + // var extractor = tester.BuildExtractor(clear: true, pushers: pusher); - var update = new UpdateConfig(); - var dt = new UADataType(DataTypeIds.Double); - var node = new 
UAObject(tester.Server.Ids.Base.Root, "BaseRoot", null, null, NodeId.Null, null); - var variable = new UAVariable(tester.Server.Ids.Base.DoubleVar1, "Variable 1", null, null, new NodeId("parent"), null); - variable.FullAttributes.DataType = dt; - var rel = new UAReference(ReferenceTypeIds.Organizes, true, new NodeId("source"), new NodeId("target2"), true, false, false, extractor.TypeManager); + // var update = new UpdateConfig(); + // var dt = new UADataType(DataTypeIds.Double); + // var node = new UAObject(tester.Server.Ids.Base.Root, "BaseRoot", null, null, NodeId.Null, null); + // var variable = new UAVariable(tester.Server.Ids.Base.DoubleVar1, "Variable 1", null, null, new NodeId("parent"), null); + // variable.FullAttributes.DataType = dt; + // var rel = new UAReference(ReferenceTypeIds.Organizes, true, new NodeId("source"), new NodeId("target2"), true, false, false, extractor.TypeManager); - var result = await pusher.PushNodes(new[] { node }, new [] { variable }, new[] { rel }, update, tester.Source.Token); + // var result = await pusher.PushNodes(new[] { node }, new [] { variable }, new[] { rel }, update, tester.Source.Token); - Assert.True(result.Objects); - Assert.True(result.RawObjects); + // Assert.True(result.Objects); + // Assert.True(result.RawObjects); - Assert.True(result.Variables); - Assert.True(result.RawVariables); + // Assert.True(result.Variables); + // Assert.True(result.RawVariables); - Assert.True(result.References); - Assert.True(result.RawReferences); + // Assert.True(result.References); + // Assert.True(result.RawReferences); - Assert.Single(handler.Assets); - Assert.Single(handler.AssetsRaw); + // Assert.Single(handler.Assets); + // Assert.Single(handler.AssetsRaw); - Assert.Single(handler.Timeseries); - Assert.Single(handler.TimeseriesRaw); + // Assert.Single(handler.Timeseries); + // Assert.Single(handler.TimeseriesRaw); - Assert.Single(handler.Relationships); - Assert.Single(handler.RelationshipsRaw); - } + // Assert.Single(handler.Relationships); + // Assert.Single(handler.RelationshipsRaw); + // } #endregion } } From 503a386daa95b76484e52f8dda8d673ce0f73617 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Wed, 26 Jul 2023 14:04:06 +0200 Subject: [PATCH 23/26] fix: failing test --- Test/Unit/CDFPusherTest.cs | 93 +++++++++++++++++--------------------- 1 file changed, 42 insertions(+), 51 deletions(-) diff --git a/Test/Unit/CDFPusherTest.cs b/Test/Unit/CDFPusherTest.cs index 3c53162c..76e64894 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -1406,65 +1406,56 @@ public async Task TestCDFNodeSetBackground() await TestUtils.WaitForCondition(() => handler.TimeseriesRaw.Count > 0, 10); } - // [Fact] - // public async Task TestAllDestinationsActive() - // { - // tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig - // { - // Clean = new CleanMetadataTargetConfig - // { - // Relationships = true, - // Assets = true, - // Timeseries = true, - // }, - // Raw = new RawMetadataTargetConfig - // { - // Database = "metadata", - // TimeseriesTable = "timeseries", - // AssetsTable = "assets", - // RelationshipsTable = "relationships" - // }, - // }; - // tester.Config.Extraction.RootNode = new ProtoNodeId - // { - // NamespaceUri = "http://opcfoundation.org/UA/", - // NodeId = "i=86" - // }; - // tester.Config.Extraction.NodeTypes.AsNodes = true; - // tester.Config.Extraction.Relationships.Enabled = true; - // tester.Config.Extraction.Relationships.Hierarchical = true; - // tester.Config.Extraction.DataTypes.AutoIdentifyTypes 
= true; - - // (handler, pusher) = tester.GetCDFPusher(); - // var extractor = tester.BuildExtractor(clear: true, pushers: pusher); + [Fact] + public async Task TestAllDestinationsActive() + { + tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig + { + Clean = new CleanMetadataTargetConfig + { + Relationships = true, + Assets = true, + Timeseries = true, + }, + Raw = new RawMetadataTargetConfig + { + Database = "metadata", + TimeseriesTable = "timeseries", + AssetsTable = "assets", + RelationshipsTable = "relationships" + }, + }; + + (handler, pusher) = tester.GetCDFPusher(); + var extractor = tester.BuildExtractor(true, null, pusher); - // var update = new UpdateConfig(); - // var dt = new UADataType(DataTypeIds.Double); - // var node = new UAObject(tester.Server.Ids.Base.Root, "BaseRoot", null, null, NodeId.Null, null); - // var variable = new UAVariable(tester.Server.Ids.Base.DoubleVar1, "Variable 1", null, null, new NodeId("parent"), null); - // variable.FullAttributes.DataType = dt; - // var rel = new UAReference(ReferenceTypeIds.Organizes, true, new NodeId("source"), new NodeId("target2"), true, false, false, extractor.TypeManager); + var update = new UpdateConfig(); + var dt = new UADataType(DataTypeIds.Double); + var node = new UAObject(tester.Server.Ids.Base.Root, "BaseRoot", null, null, NodeId.Null, null); + var variable = new UAVariable(tester.Server.Ids.Base.DoubleVar1, "Variable 1", null, null, new NodeId("parent"), null); + variable.FullAttributes.DataType = dt; + var rel = new UAReference(ReferenceTypeIds.Organizes, true, new NodeId("source"), new NodeId("target2"), true, false, false, extractor.TypeManager); - // var result = await pusher.PushNodes(new[] { node }, new [] { variable }, new[] { rel }, update, tester.Source.Token); + var result = await pusher.PushNodes(new[] { node }, new [] { variable }, new[] { rel }, update, tester.Source.Token); - // Assert.True(result.Objects); - // Assert.True(result.RawObjects); + Assert.True(result.Objects); + Assert.True(result.RawObjects); - // Assert.True(result.Variables); - // Assert.True(result.RawVariables); + Assert.True(result.Variables); + Assert.True(result.RawVariables); - // Assert.True(result.References); - // Assert.True(result.RawReferences); + Assert.True(result.References); + Assert.True(result.RawReferences); - // Assert.Single(handler.Assets); - // Assert.Single(handler.AssetsRaw); + Assert.Single(handler.Assets); + Assert.Single(handler.AssetsRaw); - // Assert.Single(handler.Timeseries); - // Assert.Single(handler.TimeseriesRaw); + Assert.Single(handler.Timeseries); + Assert.Single(handler.TimeseriesRaw); - // Assert.Single(handler.Relationships); - // Assert.Single(handler.RelationshipsRaw); - // } + Assert.Single(handler.Relationships); + Assert.Single(handler.RelationshipsRaw); + } #endregion } } From d14887742f8088804f45858d513aca0868cc5bd0 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Thu, 27 Jul 2023 09:03:05 +0200 Subject: [PATCH 24/26] fix: failing test --- Extractor/Pushers/CDFPusher.cs | 4 ++-- Test/Unit/CDFPusherTest.cs | 12 +++--------- Test/Utils/BaseExtractorTestFixture.cs | 14 ++++++++++---- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/Extractor/Pushers/CDFPusher.cs b/Extractor/Pushers/CDFPusher.cs index a54129ad..ebfca370 100644 --- a/Extractor/Pushers/CDFPusher.cs +++ b/Extractor/Pushers/CDFPusher.cs @@ -345,7 +345,7 @@ public async Task PushNodes(IEnumerable objects, var tasks = new List(); tasks.Add(PushAssets(objects, update.Objects, report, result, 
token)); - + tasks.Add(PushTimeseries(variables, update.Variables, report, result, token)); tasks.Add(PushReferences(references, report, result, token)); @@ -786,7 +786,7 @@ private ConcurrentDictionary MapTimeseries(IEnumerableTask private async Task PushTimeseries(IEnumerable variables, TypeUpdateConfig update, BrowseReport report, PushResult result, CancellationToken token) { - if (!variables.Any()) return; + if (!variables.Any() && cdfWriter.Timeseries == null && cdfWriter.Raw == null) return; var timeseriesMap = MapTimeseries(variables); await PushCleanTimeseries(timeseriesMap, update, report, result, token); diff --git a/Test/Unit/CDFPusherTest.cs b/Test/Unit/CDFPusherTest.cs index 76e64894..30f994a1 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -281,7 +281,7 @@ public async Task TestCreateUpdateAssets() { Assets = true, Timeseries = false, - Relationships = true, + Relationships = false, } }; (handler, pusher) = tester.GetCDFPusher(); @@ -1356,12 +1356,6 @@ public async Task TestCDFNodeSetBackground() }; tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig { - Clean = new CleanMetadataTargetConfig - { - Relationships = true, - Assets = false, - Timeseries = true, - }, Raw = new RawMetadataTargetConfig { Database = "metadata", @@ -1413,15 +1407,15 @@ public async Task TestAllDestinationsActive() { Clean = new CleanMetadataTargetConfig { - Relationships = true, Assets = true, Timeseries = true, + Relationships = true, }, Raw = new RawMetadataTargetConfig { Database = "metadata", - TimeseriesTable = "timeseries", AssetsTable = "assets", + TimeseriesTable = "timeseries", RelationshipsTable = "relationships" }, }; diff --git a/Test/Utils/BaseExtractorTestFixture.cs b/Test/Utils/BaseExtractorTestFixture.cs index 576d25a7..8559872e 100644 --- a/Test/Utils/BaseExtractorTestFixture.cs +++ b/Test/Utils/BaseExtractorTestFixture.cs @@ -16,6 +16,7 @@ using System.Threading; using System.Threading.Tasks; using Xunit; +using Microsoft.Extensions.DependencyInjection.Extensions; namespace Test.Utils { @@ -163,10 +164,15 @@ public async Task ClearLiteDB(InfluxDBClient client) public (CDFMockHandler, CDFPusher) GetCDFPusher() { - CommonTestUtils.AddDummyProvider("test", CDFMockHandler.MockMode.None, true, Services); - Services.AddCogniteClient("appid", null, true, true, false); - Services.AddWriters(Source.Token, Config); - var provider = Services.BuildServiceProvider(); + var newServices = new ServiceCollection(); + foreach (var service in Services) { + + newServices.Add(service); + } + CommonTestUtils.AddDummyProvider("test", CDFMockHandler.MockMode.None, true, newServices); + newServices.AddCogniteClient("appid", null, true, true, false); + newServices.AddWriters(Source.Token, Config); + var provider = newServices.BuildServiceProvider(); var destination = provider.GetRequiredService(); var pusher = new CDFPusher(Provider.GetRequiredService>(), Config, Config.Cognite, destination, provider); From a45519eb1c69ccc857d73485a12e73e4540b1664 Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Thu, 27 Jul 2023 09:43:00 +0200 Subject: [PATCH 25/26] fix: failing test --- Test/Unit/CDFPusherTest.cs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Test/Unit/CDFPusherTest.cs b/Test/Unit/CDFPusherTest.cs index 30f994a1..65774bd2 100644 --- a/Test/Unit/CDFPusherTest.cs +++ b/Test/Unit/CDFPusherTest.cs @@ -997,7 +997,6 @@ public async Task TestCreateRelationships() [Fact] public async Task TestCreateRawRelationships() { - using var extractor = 
tester.BuildExtractor(true, null, pusher); CommonTestUtils.ResetMetricValue("opcua_node_ensure_failures_cdf"); tester.Config.Cognite.MetadataTargets = new MetadataTargetsConfig @@ -1009,13 +1008,15 @@ public async Task TestCreateRawRelationships() } }; tester.Config.Extraction.Relationships.Enabled = true; + (handler, pusher) = tester.GetCDFPusher(); + using var extractor = tester.BuildExtractor(true, null, pusher); var assets = Enumerable.Empty(); var tss = Enumerable.Empty(); var update = new UpdateConfig(); // Push none - Assert.True((await pusher.PushNodes(assets, tss, Enumerable.Empty(), update, tester.Source.Token)).References); + Assert.True((await pusher.PushNodes(assets, tss, Enumerable.Empty(), update, tester.Source.Token)).RawReferences); // Fail to push var references = new List @@ -1027,9 +1028,9 @@ public async Task TestCreateRawRelationships() handler.FailedRoutes.Add("/raw/dbs/metadata/tables/relationships/rows"); Assert.False((await pusher.PushNodes(assets, tss, references, update, tester.Source.Token)).RawReferences); Assert.Empty(handler.RelationshipsRaw); + handler.FailedRoutes.Clear(); // Push successful - handler.FailedRoutes.Clear(); Assert.True((await pusher.PushNodes(assets, tss, references, update, tester.Source.Token)).RawReferences); Assert.Equal(2, handler.RelationshipsRaw.Count); From 25c1fbba0ed05b94ba9ba65923f52ef04708a12c Mon Sep 17 00:00:00 2001 From: Babatunde Aromire Date: Thu, 27 Jul 2023 13:08:14 +0200 Subject: [PATCH 26/26] docs: documentation updated --- config/config.example.yml | 23 +++++++++++++++++++++++ schema/cognite_config.schema.json | 6 +++--- 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/config/config.example.yml index ce5ef9ef..55fc0d8a 100644 --- a/config/config.example.yml +++ b/config/config.example.yml @@ -257,6 +257,29 @@ cognite: # Table to store relationships in relationships-table: + # CDF target configurations. Regardless of the configuration here, a form of timeseries is always created (either full or minimal) for now. + # In the future, however, users are expected to configure target-specific destinations. + metadata-targets: + # Configuration to enable some form of clean, i.e. assets, timeseries or relationships. + clean: + # Toggle CDF assets destination as a target + assets: false + # Toggle CDF timeseries destination as a target + timeseries: false + # Toggle CDF relationships destination as a target + relationships: false + # Configuration for some form of CDF Raw destination. + # When this is used, the 'database' name and at least one of the other keys must be provided + raw: + # Database name + database: + # Assets table name + assets-table: + # Timeseries table name + timeseries-table: + # Relationships table name + relationships-table: + # Read from CDF instead of OPC-UA when starting, to speed up start on slow servers. # Requires extraction.data-types.expand-node-ids and append-internal-values to be set to true.
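For reference, below is a minimal sketch of a filled-in metadata-targets section, based only on the keys added to config.example.yml above; the database and table names are placeholder values and are not part of this patch:

    cognite:
      metadata-targets:
        # In this sketch only timeseries metadata is written to clean
        clean:
          assets: false
          timeseries: true
          relationships: false
        # Mirror metadata to CDF Raw; 'database' plus at least one table must be set
        raw:
          database: metadata
          assets-table: assets
          timeseries-table: timeseries
          relationships-table: relationships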
diff --git a/schema/cognite_config.schema.json b/schema/cognite_config.schema.json index 65662879..41b71662 100644 --- a/schema/cognite_config.schema.json +++ b/schema/cognite_config.schema.json @@ -34,7 +34,7 @@ "description": "Metadata targets for objects, variable and references in CDF.", "unevaluatedProperties": false, "properties": { - "raw-metadata": { + "raw": { "type": "object", "description": "Details of CDF raw (database and table).", "unevaluatedProperties": false, @@ -58,7 +58,7 @@ } } }, - "clean-metadata": { + "clean": { "unevaluatedProperties": false, "description": "Enable or disable assets, timeseries and/or relationships", "type": "object", @@ -77,7 +77,7 @@ } } }, - "flexible-data-models": { + "data-models": { "unevaluatedProperties": false, "description": "configuration for flexible data models in CDF", "type": "object",
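The schema change above renames the keys nested under metadata-targets (raw-metadata becomes raw, clean-metadata becomes clean, flexible-data-models becomes data-models). As a hedged illustration of migrating away from the deprecated top-level options removed earlier in this series, and assuming the old flexible-data-models block used an enabled flag, a configuration such as:

    cognite:
      raw-metadata:
        database: metadata
        timeseries-table: timeseries
      flexible-data-models:
        enabled: true

would now be expressed through the renamed keys nested under metadata-targets:

    cognite:
      metadata-targets:
        raw:
          database: metadata
          timeseries-table: timeseries
        data-models:
          enabled: true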