diff --git a/.gitconfig b/.gitconfig
new file mode 100644
index 0000000..1e25f91
--- /dev/null
+++ b/.gitconfig
@@ -0,0 +1,2 @@
+[core]
+ autocrlf = false
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..c9c8df9
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,27 @@
+name: Attach Custom Connector as release asset
+
+on:
+ release:
+ types: [published]
+
+jobs:
+ build-and-attach:
+ runs-on: windows-latest
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v3
+
+ - name: Set up MSBuild
+ uses: microsoft/setup-msbuild@v1
+
+ - name: Build .mez file
+ run: |
+ msbuild enlyze.pq.proj /p:Configuration=Release
+
+ - name: Upload `.mez` as release asset
+ uses: softprops/action-gh-release@v1
+ with:
+ files: bin/AnyCPU/Release/enlyze-powerbi.mez
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..db4f53f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+*.DS_Store
+bin/
diff --git a/.vscode/launch.json b/.vscode/launch.json
new file mode 100644
index 0000000..a7e1185
--- /dev/null
+++ b/.vscode/launch.json
@@ -0,0 +1,14 @@
+{
+ // Use IntelliSense to learn about possible attributes.
+ // Hover to view descriptions of existing attributes.
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "type": "powerquery",
+ "request": "launch",
+ "name": "Evaluate power query file.",
+ "program": "${workspaceFolder}/${command:AskForPowerQueryFileName}"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..b305bf5
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,5 @@
+{
+ "powerquery.sdk.defaultQueryFile": "${workspaceFolder}\\enlyze.query.pq",
+ "powerquery.sdk.defaultExtension": "${workspaceFolder}\\bin\\AnyCPU\\Debug\\enlyze-powerbi.mez",
+ "powerquery.general.mode": "SDK"
+}
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
new file mode 100644
index 0000000..8ba8f8c
--- /dev/null
+++ b/.vscode/tasks.json
@@ -0,0 +1,22 @@
+{
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": "Build and deploy",
+ "type": "shell",
+ "command": "powershell.exe",
+ "args": [
+ "-ExecutionPolicy",
+ "Bypass",
+ "-File",
+ "${workspaceFolder}/push-extension.ps1"
+ ],
+ "presentation": {
+ "reveal": "always",
+ "panel": "new"
+ },
+ "group": "build",
+ "problemMatcher": []
+ }
+ ]
+}
\ No newline at end of file
diff --git a/ApiClient.pqm b/ApiClient.pqm
new file mode 100644
index 0000000..ddffbbd
--- /dev/null
+++ b/ApiClient.pqm
@@ -0,0 +1,70 @@
+let
+ BaseUrl = "https://app.enlyze.com/api/v2",
+ CommonHeaders = [
+ #"Content-Type" = "application/json",
+ #"user-agent" = "enlyze-powerbi/1.0.0"
+ ],
+ CreateHeaders = (apiToken as text) as record =>
+ Record.Combine({CommonHeaders, [#"Authorization" = "Bearer " & apiToken]}),
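+ // Fetch a single page: append the cursor and any extra query parameters to the URL, then GET and parse JSON.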
+ FetchPage = (apiPath as text, cursor as nullable text, optional queryParams as nullable record) =>
+ let
+ apiUrl = BaseUrl & apiPath,
+ apiToken = Extension.CurrentCredential()[Key],
+ headers = CreateHeaders(apiToken),
+ combinedQueryParams =
+ if queryParams <> null then
+ Record.Combine({queryParams, [cursor = cursor]})
+ else
+ [cursor = cursor],
+ fieldNames = Record.FieldNames(combinedQueryParams),
+ nullValueFields = List.Select(fieldNames, each Record.Field(combinedQueryParams, _) = null),
+ queryParamsNonNull = Record.RemoveFields(combinedQueryParams, nullValueFields),
+ queryString = Uri.BuildQueryString(queryParamsNonNull),
+ apiUrlWithQueryParams = if Text.Length(queryString) > 0 then apiUrl & "?" & queryString else apiUrl,
+ parsedResponse = Json.Document(Web.Contents(apiUrlWithQueryParams, [Headers = headers]))
+ in
+ parsedResponse,
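+ // Follows metadata.next_cursor recursively, concatenating each page's data into one list.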
+ FetchPaginated = (apiPath as text, cursor as nullable text, optional queryParams as nullable record) as list =>
+ let
+ currentPage = FetchPage(apiPath, cursor, queryParams),
+ nextCursor = currentPage[metadata][next_cursor],
+ data = currentPage[data],
+ remainingData = if nextCursor = null then {} else @FetchPaginated(apiPath, nextCursor, queryParams)
+ in
+ List.Combine({data, remainingData}),
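+ // POST a single page; non-200 statuses are handled manually and surfaced as errors including the response body.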
+ PostRequestPage = (apiPath as text, body as record, cursor as nullable text) as record =>
+ let
+ bodyWithCursor = if cursor <> null then Record.Combine({body, [cursor = cursor]}) else body,
+ url = BaseUrl & apiPath,
+ apiToken = Extension.CurrentCredential()[Key],
+ headers = CreateHeaders(apiToken),
+ response = Web.Contents(
+ url,
+ [
+ Headers = headers,
+ Content = Json.FromValue(bodyWithCursor),
+ ManualStatusHandling = {400, 401, 403, 404, 422, 500}
+ ]
+ ),
+ statusCode = Value.Metadata(response)[Response.Status],
+ parsedResponse =
+ if statusCode = 200 then
+ Json.Document(response)
+ else
+ error "HTTP Error: " & Text.From(statusCode) & ". Response body: " & Text.FromBinary(response)
+ in
+ parsedResponse,
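+ // Like FetchPaginated, but for POST endpoints; a single-record response is wrapped in a list.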
+ PaginatedPostRequest = (apiPath as text, body as record, optional cursor as nullable text) as list =>
+ let
+ currentPage = PostRequestPage(apiPath, body, cursor),
+ dataMaybeRecord = currentPage[data],
+ data = if dataMaybeRecord is list then dataMaybeRecord else {dataMaybeRecord},
+ nextCursor = currentPage[metadata][next_cursor],
+ remainingData = if nextCursor = null then {} else @PaginatedPostRequest(apiPath, body, nextCursor)
+ in
+ List.Combine({data, remainingData})
+in
+ [
+ FetchPaginated = FetchPaginated,
+ PaginatedPostRequest = PaginatedPostRequest
+ ]
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..ab2ef31
--- /dev/null
+++ b/README.md
@@ -0,0 +1,36 @@
+# ENLYZE Power BI Integration
+
+The ENLYZE Power BI Integration enables users to pull their production data into Power BI. The project is based on the [Power Query Connector Development SDK](https://github.com/microsoft/vscode-powerquery-sdk).
+
+## Features
+
+The ENLYZE Power BI Integration currently supports querying the following resources:
+
+- Sites
+- Machines
+- Production Runs
+- Products
+- Downtimes
+- Productivity Metrics for machines
+- Variables
+- Timeseries data for variables
+
+## Installation
+
+To get started with the ENLYZE Power BI Integration:
+
+1. Download the `enlyze-powerbi.mez` file from the [latest release](/releases/latest/download/enlyze-powerbi.mez).
+2. Follow the steps [described in the official documentation](https://learn.microsoft.com/en-us/power-bi/connect-data/desktop-connector-extensibility#custom-connectors).
+3. Restart Power BI.
+
+## Fetching data
+
+With Power BI open, click on *Get data*:
+
+![menu bar](docs/images/menu-bar.png)
+
+Then, search for ENLYZE:
+
+![connector selection](docs/images/connector-select.png)
+
+Finally, select the dataset you want to query:
+
+![dataset selection](docs/images/dataset-select.png)
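+
+## Example query
+
+Once connected, the navigation table can also be consumed from the Advanced Editor. Here is a minimal sketch (the keys match the entries registered in the connector's navigation table, e.g. `downtimes` or `machines`):
+
+```
+let
+    navTable = enlyze.Contents(),
+    // pick a dataset by its key in the navigation table
+    downtimes = navTable{[Key = "downtimes"]}[Data]
+in
+    downtimes
+```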
diff --git a/docs/images/connector-select.png b/docs/images/connector-select.png
new file mode 100644
index 0000000..7036ad0
Binary files /dev/null and b/docs/images/connector-select.png differ
diff --git a/docs/images/dataset-select.png b/docs/images/dataset-select.png
new file mode 100644
index 0000000..bed98be
Binary files /dev/null and b/docs/images/dataset-select.png differ
diff --git a/docs/images/menu-bar.png b/docs/images/menu-bar.png
new file mode 100644
index 0000000..a956c2e
Binary files /dev/null and b/docs/images/menu-bar.png differ
diff --git a/downtimes/Downtimes.TableSchema.pqm b/downtimes/Downtimes.TableSchema.pqm
new file mode 100644
index 0000000..b6be704
--- /dev/null
+++ b/downtimes/Downtimes.TableSchema.pqm
@@ -0,0 +1,16 @@
+let
+ DowntimesTableSchema = type table [
+ uuid = text,
+ machine = text,
+ comment = nullable text,
+ #"type" = text,
+ start = datetimezone,
+ end = nullable datetimezone,
+ updated_first_name = nullable text,
+ updated_last_name = nullable text,
+ updated_timestamp = nullable datetimezone,
+ reason_uuid = nullable text,
+ reason_name = nullable text,
+ reason_category = nullable text
+ ]
+in
+ DowntimesTableSchema
diff --git a/downtimes/Downtimes.Transform.pqm b/downtimes/Downtimes.Transform.pqm
new file mode 100644
index 0000000..c8e0909
--- /dev/null
+++ b/downtimes/Downtimes.Transform.pqm
@@ -0,0 +1,40 @@
+let
+ loadModule = (fileName as text) =>
+ let
+ binary = Extension.Contents(fileName), asText = Text.FromBinary(binary)
+ in
+ try
+ Expression.Evaluate(asText, #shared) catch (e) =>
+ error
+ [
+ Reason = "Extension.LoadModule Failure",
+ Message.Format = "Loading '#{0}' failed - '#{1}': '#{2}'",
+ Message.Parameters = {fileName, e[Reason], e[Message]},
+ Detail = [File = fileName, Error = e]
+ ],
+ TableSchema = loadModule("Downtimes.TableSchema.pqm"),
+ Table.ChangeType = loadModule("Table.ChangeType.pqm"),
+ TimeseriesData = loadModule("TimeseriesData.pqm"),
+ TransformDowntimes = (downtimes as list) as table =>
+ let
+ downtimesTable = Table.FromList(downtimes, Splitter.SplitByNothing(), null, null, ExtraValues.Error),
+ namedTable = Value.ReplaceMetadata(downtimesTable, Value.Metadata(downtimesTable) & [Name = "Downtimes"]),
+ expandedTable = Table.ExpandRecordColumn(
+ namedTable, "Column1", {"uuid", "machine", "comment", "type", "updated", "reason", "start", "end"}
+ ),
+ expandedUpdated = Table.ExpandRecordColumn(
+ expandedTable,
+ "updated",
+ {"first_name", "last_name", "timestamp"},
+ {"updated_first_name", "updated_last_name", "updated_timestamp"}
+ ),
+ expandedReason = Table.ExpandRecordColumn(
+ expandedUpdated,
+ "reason",
+ {"uuid", "name", "category"},
+ {"reason_uuid", "reason_name", "reason_category"}
+ )
+ in
+ Table.ChangeType(expandedReason, TableSchema)
+in
+ [TransformDowntimes = TransformDowntimes]
diff --git a/enlyze.pq b/enlyze.pq
new file mode 100644
index 0000000..7388569
--- /dev/null
+++ b/enlyze.pq
@@ -0,0 +1,136 @@
+[Version = "1.0.0"]
+section enlyze;
+
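+// Evaluates a bundled .pqm file and returns its value; this is the module-loading pattern from the Power Query SDK samples.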
+loadModule = (fileName as text) =>
+ let
+ binary = Extension.Contents(fileName), asText = Text.FromBinary(binary)
+ in
+ try
+ Expression.Evaluate(asText, #shared) catch (e) =>
+ error
+ [
+ Reason = "loadModule Failure",
+ Message.Format = "Loading '#{0}' failed - '#{1}': '#{2}'",
+ Message.Parameters = {fileName, e[Reason], e[Message]},
+ Detail = [File = fileName, Error = e]
+ ];
+
+Table.ToNavigationTable = loadModule("Table.ToNavigationTable.pqm");
+
+FetchPaginated = loadModule("ApiClient.pqm")[FetchPaginated];
+PaginatedPostRequest = loadModule("ApiClient.pqm")[PaginatedPostRequest];
+
+TransformProductivityMetrics = loadModule("ProductivityMetrics.Transform.pqm")[TransformProductivityMetrics];
+TransformProductionRuns = loadModule("ProductionRuns.Transform.pqm")[TransformProductionRuns];
+TransformSites = loadModule("Sites.Transform.pqm")[TransformSites];
+TransformMachines = loadModule("Machines.Transform.pqm")[TransformMachines];
+TransformProducts = loadModule("Products.Transform.pqm")[TransformProducts];
+TransformDowntimes = loadModule("Downtimes.Transform.pqm")[TransformDowntimes];
+TransformTimeseriesData = loadModule("TimeseriesData.Transform.pqm")[TransformTimeseriesData];
+TransformVariables = loadModule("Variables.Transform.pqm")[TransformVariables];
+
+MachineProductivityMetrics = loadModule("MachineProductivityMetrics.pqm");
+TimeseriesData = loadModule("TimeseriesData.pqm");
+
+[DataSource.Kind = "enlyze", Publish = "enlyze.Publish"]
+shared enlyze.Contents = () =>
+ let
+ NavTable = Table.ToNavigationTable(
+ #table(
+ {"Name", "Key", "Data", "ItemKind", "ItemName", "IsLeaf"},
+ {
+ {
+ "Downtimes",
+ "downtimes",
+ TransformDowntimes(FetchPaginated("/downtimes", null)),
+ "Table",
+ "Table",
+ true
+ },
+ {
+ "Production Runs",
+ "productionRuns",
+ TransformProductionRuns(FetchPaginated("/production-runs", null)),
+ "Table",
+ "Table",
+ true
+ },
+ {
+ "Machines",
+ "machines",
+ TransformMachines(FetchPaginated("/machines", null)),
+ "Table",
+ "Table",
+ true
+ },
+ {"Sites", "sites", TransformSites(FetchPaginated("/sites", null)), "Table", "Table", true},
+ {
+ "Products",
+ "products",
+ TransformProducts(FetchPaginated("/products", null)),
+ "Table",
+ "Table",
+ true
+ },
+ {
+ "Machine Productivity Metrics",
+ "machineProductivityMetrics",
+ MachineProductivityMetrics,
+ "Function",
+ "Function",
+ true
+ },
+ {
+ "Variables",
+ "Variables",
+ TransformVariables(FetchPaginated("/variables", null)),
+ "Table",
+ "Table",
+ true
+ },
+ {"Timeseries", "Timeseries", TimeseriesData, "Function", "Function", true}
+ }
+ ),
+ {"Key"},
+ "Name",
+ "Data",
+ "ItemKind",
+ "ItemName",
+ "IsLeaf"
+ )
+ in
+ NavTable;
+
+enlyze = [
+ Authentication = [
+ Key = [
+ Label = "ENLYZE API Key",
+ KeyLabel = "ENLYZE API Key"
+ ]
+ ],
+ Label = "ENLYZE"
+];
+
+enlyze.Publish = [
+ Beta = true,
+ Category = "Other",
+ ButtonText = {Extension.LoadString("ButtonTitle"), Extension.LoadString("ButtonHelp")},
+ LearnMoreUrl = "https://docs.enlyze.com/",
+ SourceImage = enlyze.Icons,
+ SourceTypeImage = enlyze.Icons
+];
+
+enlyze.Icons = [
+ Icon16 = {
+ Extension.Contents("ENLYZE16.png"),
+ Extension.Contents("ENLYZE20.png"),
+ Extension.Contents("ENLYZE24.png"),
+ Extension.Contents("ENLYZE32.png")
+ },
+ Icon32 = {
+ Extension.Contents("ENLYZE32.png"),
+ Extension.Contents("ENLYZE40.png"),
+ Extension.Contents("ENLYZE48.png"),
+ Extension.Contents("ENLYZE64.png")
+ }
+];
diff --git a/enlyze.pq.proj b/enlyze.pq.proj
new file mode 100644
index 0000000..9560ef7
--- /dev/null
+++ b/enlyze.pq.proj
@@ -0,0 +1,35 @@
+
+
+
+ Debug
+
+
+ $(MSBuildProjectDirectory)\bin\AnyCPU\Debug\
+ $(MSBuildProjectDirectory)\obj\
+ $(IntermediateOutputPath)MEZ\
+ $(OutputPath)enlyze-powerbi.mez
+
+
+ $(MSBuildProjectDirectory)\bin\AnyCPU\Release\
+ $(MSBuildProjectDirectory)\obj\Release\
+ $(IntermediateOutputPath)MEZ\
+ $(OutputPath)enlyze-powerbi.mez
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/enlyze.query.pq b/enlyze.query.pq
new file mode 100644
index 0000000..f4d4336
--- /dev/null
+++ b/enlyze.query.pq
@@ -0,0 +1,17 @@
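+// Test query evaluated by the Power Query SDK (wired up as powerquery.sdk.defaultQueryFile in .vscode/settings.json).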
+let
+ navTable = enlyze.Contents(),
+ machines = navTable{[Key="machines"]}[Data],
+ downtimes = navTable{[Key="downtimes"]}[Data],
+
+ start = #datetimezone(2024, 1, 1, 0, 0, 0, 0, 0),
+ end = #datetimezone(2024, 1, 7, 0, 0, 0, 0, 0),
+ resolution = "daily",
+
+ productivityMetrics = navTable{[Key="machineProductivityMetrics"]}[Data](
+ machines,
+ start,
+ end,
+ resolution
+ )
+in
+ downtimes
diff --git a/helpers/Table.ChangeType.pqm b/helpers/Table.ChangeType.pqm
new file mode 100644
index 0000000..a4cca1c
--- /dev/null
+++ b/helpers/Table.ChangeType.pqm
@@ -0,0 +1,195 @@
+let
+ // table should be an actual Table.Type, or a List.Type of Records
+ Table.ChangeType = (table, tableType as type) as nullable table =>
+ // we only operate on table types
+ if (not Type.Is(tableType, type table)) then
+ error "type argument should be a table type"
+ else
+ // if we have a null value, just return it
+ if (table = null) then
+ table
+ else
+ let
+ columnsForType = Type.RecordFields(Type.TableRow(tableType)),
+ columnsAsTable = Record.ToTable(columnsForType),
+ schema = Table.ExpandRecordColumn(columnsAsTable, "Value", {"Type"}, {"Type"}),
+ previousMeta = Value.Metadata(tableType),
+ // make sure we have a table
+ parameterType = Value.Type(table),
+ _table =
+ if (Type.Is(parameterType, type table)) then
+ table
+ else if (Type.Is(parameterType, type list)) then
+ let
+ asTable = Table.FromList(table, Splitter.SplitByNothing(), {"Column1"}),
+ firstValueType = Value.Type(Table.FirstValue(asTable, null)),
+ result =
+ // if the member is a record (as expected), then expand it.
+ if (Type.Is(firstValueType, type record)) then
+ Table.ExpandRecordColumn(asTable, "Column1", schema[Name])
+ else
+ error
+ Error.Record(
+ "Error.Parameter",
+ "table argument is a list, but not a list of records",
+ [
+ ValueType = firstValueType
+ ]
+ )
+ in
+ if (List.IsEmpty(table)) then
+ #table({"a"}, {})
+ else
+ result
+ else
+ error
+ Error.Record(
+ "Error.Parameter",
+ "table argument should be a table or list of records",
+ [
+ ValueType = parameterType
+ ]
+ ),
+ reordered = Table.SelectColumns(_table, schema[Name], MissingField.UseNull),
+ // process primitive values - this will call Table.TransformColumnTypes
+ map = (t) =>
+ if Type.Is(t, type table) or Type.Is(t, type list) or Type.Is(t, type record) or t = type any
+ then
+ null
+ else
+ t,
+ mapped = Table.TransformColumns(schema, {"Type", map}),
+ omitted = Table.SelectRows(mapped, each [Type] <> null),
+ existingColumns = Table.ColumnNames(reordered),
+ removeMissing = Table.SelectRows(omitted, each List.Contains(existingColumns, [Name])),
+ primitiveTransforms = Table.ToRows(removeMissing),
+ changedPrimitives = Table.TransformColumnTypes(reordered, primitiveTransforms),
+ // Get the list of transforms we'll use for Record types
+ recordColumns = Table.SelectRows(schema, each Type.Is([Type], type record)),
+ recordTypeTransformations = Table.AddColumn(
+ recordColumns, "RecordTransformations", each (r) => Record.ChangeType(r, [Type]), type function
+ ),
+ recordChanges = Table.ToRows(
+ Table.SelectColumns(recordTypeTransformations, {"Name", "RecordTransformations"})
+ ),
+ // Get the list of transforms we'll use for List types
+ listColumns = Table.SelectRows(schema, each Type.Is([Type], type list)),
+ listTransforms = Table.AddColumn(
+ listColumns, "ListTransformations", each (t) => List.ChangeType(t, [Type]), Function.Type
+ ),
+ listChanges = Table.ToRows(Table.SelectColumns(listTransforms, {"Name", "ListTransformations"})),
+ // Get the list of transforms we'll use for Table types
+ tableColumns = Table.SelectRows(schema, each Type.Is([Type], type table)),
+ tableTransforms = Table.AddColumn(
+ tableColumns, "TableTransformations", each (t) => @Table.ChangeType(t, [Type]), Function.Type
+ ),
+ tableChanges = Table.ToRows(Table.SelectColumns(tableTransforms, {"Name", "TableTransformations"})),
+ // Perform all of our transformations
+ allColumnTransforms = recordChanges & listChanges & tableChanges,
+ changedRecordTypes =
+ if (List.IsEmpty(allColumnTransforms)) then
+ changedPrimitives
+ else
+ Table.TransformColumns(changedPrimitives, allColumnTransforms, null, MissingField.Ignore),
+ // set final type
+ withType = Value.ReplaceType(changedRecordTypes, tableType)
+ in
+ if (List.IsEmpty(Record.FieldNames(columnsForType))) then
+ table
+ else
+ withType meta previousMeta,
+ // If given a generic record type (no predefined fields), the original record is returned
+ Record.ChangeType = (record as record, recordType as type) =>
+ let
+ // record field format is [ fieldName = [ Type = type, Optional = logical], ... ]
+ fields =
+ try
+ Type.RecordFields(recordType)
+ otherwise
+ error "Record.ChangeType: failed to get record fields. Is this a record type?",
+ fieldNames = Record.FieldNames(fields),
+ fieldTable = Record.ToTable(fields),
+ optionalFields = Table.SelectRows(fieldTable, each[Value][Optional])[Name],
+ requiredFields = List.Difference(fieldNames, optionalFields),
+ // make sure all required fields exist
+ withRequired = Record.SelectFields(record, requiredFields, MissingField.UseNull),
+ // append optional fields
+ withOptional = withRequired & Record.SelectFields(record, optionalFields, MissingField.Ignore),
+ // set types
+ transforms = GetTransformsForType(recordType),
+ withTypes = Record.TransformFields(withOptional, transforms, MissingField.Ignore),
+ // order the same as the record type
+ reorder = Record.ReorderFields(withTypes, fieldNames, MissingField.Ignore)
+ in
+ if (List.IsEmpty(fieldNames)) then
+ record
+ else
+ reorder,
+ List.ChangeType = (list as list, listType as type) =>
+ if (not Type.Is(listType, type list)) then
+ error "type argument should be a list type"
+ else
+ let
+ listItemType = Type.ListItem(listType),
+ transform = GetTransformByType(listItemType),
+ modifiedValues = List.Transform(list, transform),
+ typed = Value.ReplaceType(modifiedValues, listType)
+ in
+ typed,
+ // Returns a table type for the provided schema table
+ Schema.ToTableType = (schema as table) as type =>
+ let
+ toList = List.Transform(schema[Type], (t) => [Type = t, Optional = false]),
+ toRecord = Record.FromList(toList, schema[Name]),
+ toType = Type.ForRecord(toRecord, false),
+ previousMeta = Value.Metadata(schema)
+ in
+ type table (toType) meta previousMeta,
+ // Returns a list of transformations that can be passed to Table.TransformColumns, or Record.TransformFields
+ // Format: {"Column", (f) => ...) .... ex: {"A", Number.From}
+ GetTransformsForType = (_type as type) as list =>
+ let
+ fieldsOrColumns =
+ if (Type.Is(_type, type record)) then
+ Type.RecordFields(_type)
+ else if (Type.Is(_type, type table)) then
+ Type.RecordFields(Type.TableRow(_type))
+ else
+ error "GetTransformsForType: record or table type expected",
+ toTable = Record.ToTable(fieldsOrColumns),
+ transformColumn = Table.AddColumn(
+ toTable, "Transform", each GetTransformByType([Value][Type]), Function.Type
+ ),
+ transformMap = Table.ToRows(Table.SelectColumns(transformColumn, {"Name", "Transform"}))
+ in
+ transformMap,
+ GetTransformByType = (type_in as type) as function =>
+ let
+ _type = Type.NonNullable(type_in)
+ in
+ if (Type.Is(_type, type number)) then
+ Number.From
+ else if (Type.Is(_type, type text)) then
+ Text.From
+ else if (Type.Is(_type, type date)) then
+ Date.From
+ else if (Type.Is(_type, type datetime)) then
+ DateTime.From
+ else if (Type.Is(_type, type duration)) then
+ Duration.From
+ else if (Type.Is(_type, type datetimezone)) then
+ DateTimeZone.From
+ else if (Type.Is(_type, type logical)) then
+ Logical.From
+ else if (Type.Is(_type, type time)) then
+ Time.From
+ else if (Type.Is(_type, type record)) then
+ (t) => if (t <> null) then @Record.ChangeType(t, _type) else t
+ else if (Type.Is(_type, type table)) then
+ (t) => if (t <> null) then @Table.ChangeType(t, _type) else t
+ else if (Type.Is(_type, type list)) then
+ (t) => if (t <> null) then @List.ChangeType(t, _type) else t
+ else
+ (t) => t
+in
+ Table.ChangeType
diff --git a/helpers/Table.ToNavigationTable.pqm b/helpers/Table.ToNavigationTable.pqm
new file mode 100644
index 0000000..fa78f5e
--- /dev/null
+++ b/helpers/Table.ToNavigationTable.pqm
@@ -0,0 +1,21 @@
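+// Converts a table into a navigation table by tagging its type with the metadata Power BI's navigator reads.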
+(
+ table as table,
+ keyColumns as list,
+ nameColumn as text,
+ dataColumn as text,
+ itemKindColumn as text,
+ itemNameColumn as text,
+ isLeafColumn as text
+) as table =>
+ let
+ tableType = Value.Type(table),
+ newTableType = Type.AddTableKey(tableType, keyColumns, true) meta [
+ NavigationTable.NameColumn = nameColumn,
+ NavigationTable.DataColumn = dataColumn,
+ NavigationTable.ItemKindColumn = itemKindColumn,
+ Preview.DelayColumn = itemNameColumn,
+ NavigationTable.IsLeafColumn = isLeafColumn
+ ],
+ navigationTable = Value.ReplaceType(table, newTableType)
+ in
+ navigationTable
diff --git a/machines/Machines.TableSchema.pqm b/machines/Machines.TableSchema.pqm
new file mode 100644
index 0000000..f1f16e9
--- /dev/null
+++ b/machines/Machines.TableSchema.pqm
@@ -0,0 +1,6 @@
+let
+ MachinesTableSchema = type table [
+ name = text, uuid = text, site = text, genesis_date = datetimezone, #"productivity_metrics" = any
+ ]
+in
+ MachinesTableSchema
diff --git a/machines/Machines.Transform.pqm b/machines/Machines.Transform.pqm
new file mode 100644
index 0000000..fc2235f
--- /dev/null
+++ b/machines/Machines.Transform.pqm
@@ -0,0 +1,40 @@
+let
+ loadModule = (fileName as text) =>
+ let
+ binary = Extension.Contents(fileName), asText = Text.FromBinary(binary)
+ in
+ try
+ Expression.Evaluate(asText, #shared) catch (e) =>
+ error
+ [
+ Reason = "Extension.LoadModule Failure",
+ Message.Format = "Loading '#{0}' failed - '#{1}': '#{2}'",
+ Message.Parameters = {fileName, e[Reason], e[Message]},
+ Detail = [File = fileName, Error = e]
+ ],
+ MachineProductivityMetrics = loadModule("MachineProductivityMetrics.pqm"),
+ FunctionTypeSingleMachine = loadModule(
+ "MachineProductivityMetrics.FunctionTypes.pqm"
+ )[MachineProductivityMetricsSingleMachineType],
+ TableSchema = loadModule("Machines.TableSchema.pqm"),
+ Table.ChangeType = loadModule("Table.ChangeType.pqm"),
+ TransformMachines = (machines as list) as table =>
+ let
+ machinesTable = Table.FromList(machines, Splitter.SplitByNothing(), null, null, ExtraValues.Error),
+ namedTable = Value.ReplaceMetadata(machinesTable, Value.Metadata(machinesTable) & [Name = "Machines"]),
+ expandedTable = Table.ExpandRecordColumn(namedTable, "Column1", {"name", "uuid", "site", "genesis_date"}),
+ columnNames = Table.ColumnNames(expandedTable),
+ machinesWithMetrics = Table.AddColumn(
+ expandedTable,
+ "productivity_metrics",
+ (row) =>
+ let
+ func = (start as datetimezone, end as datetimezone, resolution as text) =>
+ MachineProductivityMetrics(Table.FromRecords({row}), start, end, resolution)
+ in
+ Value.ReplaceType(func, FunctionTypeSingleMachine)
+ )
+ in
+ Table.ChangeType(machinesWithMetrics, TableSchema)
+in
+ [TransformMachines = TransformMachines]
diff --git a/productionRuns/ProductionRuns.TableSchema.pqm b/productionRuns/ProductionRuns.TableSchema.pqm
new file mode 100644
index 0000000..c884d45
--- /dev/null
+++ b/productionRuns/ProductionRuns.TableSchema.pqm
@@ -0,0 +1,33 @@
+let
+ ProductionRunsTableSchema = type table [
+ uuid = text,
+ machine = text,
+ production_order = text,
+ product = text,
+ start = datetimezone,
+ end = nullable datetimezone,
+ average_throughput = nullable number,
+ availability_score = nullable number,
+ availability_time_loss = nullable number,
+ performance_score = nullable number,
+ performance_time_loss = nullable number,
+ quality_score = nullable number,
+ quality_time_loss = nullable number,
+ productivity_score = nullable number,
+ productivity_time_loss = nullable number,
+ quantity_scrap_value = nullable number,
+ quantity_scrap_unit = nullable text,
+ quantity_yield_value = nullable number,
+ quantity_yield_unit = nullable text,
+ quantity_total_value = nullable number,
+ quantity_total_unit = nullable text,
+ data_coverage = nullable number,
+ overlap_percentage = nullable number,
+ overlap_time = nullable number,
+ max_run_speed_value = nullable number,
+ max_run_speed_unit = nullable text,
+ max_run_speed_start = nullable datetimezone,
+ max_run_speed_end = nullable datetimezone
+ ]
+in
+ ProductionRunsTableSchema
diff --git a/productionRuns/ProductionRuns.Transform.pqm b/productionRuns/ProductionRuns.Transform.pqm
new file mode 100644
index 0000000..279de03
--- /dev/null
+++ b/productionRuns/ProductionRuns.Transform.pqm
@@ -0,0 +1,73 @@
+let
+ loadModule = (fileName as text) =>
+ let
+ binary = Extension.Contents(fileName), asText = Text.FromBinary(binary)
+ in
+ try
+ Expression.Evaluate(asText, #shared) catch (e) =>
+ error
+ [
+ Reason = "Extension.LoadModule Failure",
+ Message.Format = "Loading '#{0}' failed - '#{1}': '#{2}'",
+ Message.Parameters = {fileName, e[Reason], e[Message]},
+ Detail = [File = fileName, Error = e]
+ ],
+ TableSchema = loadModule("ProductionRuns.TableSchema.pqm"),
+ Table.ChangeType = loadModule("Table.ChangeType.pqm"),
+ TransformProductivity = loadModule("ProductivityMetrics.Transform.pqm")[TransformProductivity],
+ TransformAvailability = loadModule("ProductivityMetrics.Transform.pqm")[TransformAvailability],
+ TransformPerformance = loadModule("ProductivityMetrics.Transform.pqm")[TransformPerformance],
+ TransformQuality = loadModule("ProductivityMetrics.Transform.pqm")[TransformQuality],
+ TransformQuantityScrap = loadModule("Quantities.Transform.pqm")[TransformQuantityScrap],
+ TransformQuantityYield = loadModule("Quantities.Transform.pqm")[TransformQuantityYield],
+ TransformQuantityTotal = loadModule("Quantities.Transform.pqm")[TransformQuantityTotal],
+ TransformProductionRuns = (productionRuns as list) as table =>
+ let
+ productionRunsTable = Table.FromList(
+ productionRuns, Splitter.SplitByNothing(), null, null, ExtraValues.Error
+ ),
+ namedTable = Value.ReplaceMetadata(
+ productionRunsTable, Value.Metadata(productionRunsTable) & [Name = "Production Runs"]
+ ),
+ expandedTable = Table.ExpandRecordColumn(
+ namedTable,
+ "Column1",
+ {
+ "uuid",
+ "machine",
+ "production_order",
+ "product",
+ "start",
+ "end",
+ "average_throughput",
+ "quantity_total",
+ "quantity_scrap",
+ "quantity_yield",
+ "availability",
+ "performance",
+ "quality",
+ "productivity",
+ "maximum_run_speed",
+ "data_quality"
+ }
+ ),
+ expandedAvailability = TransformAvailability(expandedTable),
+ expandedPerformance = TransformPerformance(expandedAvailability),
+ expandedQuality = TransformQuality(expandedPerformance),
+ expandedProductivity = TransformProductivity(expandedQuality),
+ expandedQuantityScrap = TransformQuantityScrap(expandedProductivity),
+ expandedQuantityYield = TransformQuantityYield(expandedQuantityScrap),
+ expandedQuantityTotal = TransformQuantityTotal(expandedQuantityYield),
+ expandedDataQuality = Table.ExpandRecordColumn(
+ expandedQuantityTotal, "data_quality", {"data_coverage", "overlap_percentage", "overlap_time"}
+ ),
+ expandedMaxRunSpeed = Table.ExpandRecordColumn(
+ expandedDataQuality,
+ "maximum_run_speed",
+ {"value", "observation_period_start", "observation_period_end", "unit"},
+ {"max_run_speed_value", "max_run_speed_start", "max_run_speed_end", "max_run_speed_unit"}
+ )
+ in
+ Table.ChangeType(expandedMaxRunSpeed, TableSchema)
+in
+ [TransformProductionRuns = TransformProductionRuns]
diff --git a/productivityMetrics/DateTimeRanges.pqm b/productivityMetrics/DateTimeRanges.pqm
new file mode 100644
index 0000000..f1b768a
--- /dev/null
+++ b/productivityMetrics/DateTimeRanges.pqm
@@ -0,0 +1,136 @@
+let
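+ // Render timestamps in the ISO 8601 round-trip ("O") format for use in request bodies.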
+ ToIso8601 = (dt as datetimezone) as text => DateTimeZone.ToText(dt, [Format = "O", Culture = "en-US"]),
+ ToHourlyDateTimeRanges = (start as datetimezone, end as datetimezone) as list =>
+ let
+ roundToHour = (dt as datetimezone) =>
+ #datetimezone(
+ Date.Year(dt),
+ Date.Month(dt),
+ Date.Day(dt),
+ Time.Hour(dt),
+ 0,
+ 0,
+ DateTimeZone.ZoneHours(dt),
+ DateTimeZone.ZoneMinutes(dt)
+ ),
+ durationOneHour = #duration(0, 1, 0, 0),
+ addOneHour = (dt as datetimezone) => DateTimeZone.From(dt + durationOneHour),
+ roundedStart = roundToHour(start),
+ hourStarts = List.Generate(() => roundedStart, each _ <= end, each addOneHour(_)),
+ ranges = List.Transform(
+ hourStarts, (hourStart) => [
+ start = ToIso8601(hourStart),
+ end = ToIso8601(addOneHour(hourStart))
+ ]
+ )
+ in
+ ranges,
+ ToDailyDateTimeRanges = (start as datetimezone, end as datetimezone) as list =>
+ let
+ roundToDay = (dt as datetimezone) =>
+ #datetimezone(
+ Date.Year(dt),
+ Date.Month(dt),
+ Date.Day(dt),
+ 0,
+ 0,
+ 0,
+ DateTimeZone.ZoneHours(dt),
+ DateTimeZone.ZoneMinutes(dt)
+ ),
+ addDay = (dt as datetimezone) => DateTimeZone.From(Date.AddDays(dt, 1)),
+ dayStarts = List.Generate(() => roundToDay(start), each Date.From(_) <= Date.From(end), each addDay(_)),
+ ranges = List.Transform(
+ dayStarts, (dayStart) => [
+ start = ToIso8601(dayStart),
+ end = ToIso8601(addDay(dayStart))
+ ]
+ )
+ in
+ ranges,
+ ToWeeklyDateTimeRanges = (start as datetimezone, end as datetimezone) as list =>
+ let
+ roundToWeekStart = (dt as datetimezone) =>
+ let
+ dayOfWeek = Date.DayOfWeek(Date.From(dt), Day.Monday) + 1,
+ mondayOfWeek = Date.AddDays(Date.From(dt), - (dayOfWeek - 1))
+ in
+ #datetimezone(
+ Date.Year(mondayOfWeek),
+ Date.Month(mondayOfWeek),
+ Date.Day(mondayOfWeek),
+ 0,
+ 0,
+ 0,
+ DateTimeZone.ZoneHours(dt),
+ DateTimeZone.ZoneMinutes(dt)
+ ),
+ endOfWeek = (dt as datetimezone) =>
+ let
+ nextMonday = roundToWeekStart(dt) + #duration(7, 0, 0, 0)
+ in
+ #datetimezone(
+ Date.Year(nextMonday),
+ Date.Month(nextMonday),
+ Date.Day(nextMonday),
+ 0,
+ 0,
+ 0,
+ DateTimeZone.ZoneHours(dt),
+ DateTimeZone.ZoneMinutes(dt)
+ ),
+ weekStarts = List.Generate(
+ () => roundToWeekStart(start), each _ <= end, each DateTimeZone.From(Date.AddDays(_, 7))
+ ),
+ ranges = List.Transform(
+ weekStarts, (weekStart) => [
+ start = ToIso8601(weekStart),
+ end = ToIso8601(endOfWeek(weekStart))
+ ]
+ )
+ in
+ ranges,
+ ToMonthlyDateTimeRanges = (start as datetimezone, end as datetimezone) as list =>
+ let
+ roundToMonthStart = (dt as datetimezone) =>
+ #datetimezone(
+ Date.Year(dt), Date.Month(dt), 1, 0, 0, 0, DateTimeZone.ZoneHours(dt), DateTimeZone.ZoneMinutes(dt)
+ ),
+ startOfNextMonth = (dt as datetimezone) =>
+ let
+ nextMonth = Date.AddMonths(Date.From(dt), 1)
+ in
+ #datetimezone(
+ Date.Year(nextMonth),
+ Date.Month(nextMonth),
+ 1,
+ 0,
+ 0,
+ 0,
+ DateTimeZone.ZoneHours(dt),
+ DateTimeZone.ZoneMinutes(dt)
+ ),
+ monthStarts = List.Generate(
+ () => roundToMonthStart(start), each _ <= end, each DateTimeZone.From(Date.AddMonths(_, 1))
+ ),
+ ranges = List.Transform(
+ monthStarts,
+ (monthStart) =>
+ [
+ start = ToIso8601(monthStart),
+ end = ToIso8601(
+ if end <= startOfNextMonth(monthStart) then end else startOfNextMonth(monthStart)
+ )
+ ]
+ )
+ in
+ ranges
+in
+ [
+ ToHourlyDateTimeRanges = ToHourlyDateTimeRanges,
+ ToDailyDateTimeRanges = ToDailyDateTimeRanges,
+ ToWeeklyDateTimeRanges = ToWeeklyDateTimeRanges,
+ ToMonthlyDateTimeRanges = ToMonthlyDateTimeRanges
+ ]
diff --git a/productivityMetrics/MachineProductivityMetrics.FunctionTypes.pqm b/productivityMetrics/MachineProductivityMetrics.FunctionTypes.pqm
new file mode 100644
index 0000000..37a244f
--- /dev/null
+++ b/productivityMetrics/MachineProductivityMetrics.FunctionTypes.pqm
@@ -0,0 +1,28 @@
+let
+ MachineProductivityMetricsSingleMachineType = type function (
+ start as datetimezone,
+ end as datetimezone,
+ resolution as (
+ type text meta [
+ Documentation.Label = "Resolution",
+ Documentation.Description = "Select a resolution.",
+ Documentation.AllowedValues = {"hourly", "daily", "weekly", "monthly"}
+ ]
+ )
+ ) as table,
+ MachineProductivityMetricsType = type function (
+ machines as table,
+ start as datetimezone,
+ end as datetimezone,
+ resolution as (
+ type text meta [
+ Documentation.Description = "Select a resolution.",
+ Documentation.AllowedValues = {"hourly", "daily", "weekly", "monthly"}
+ ]
+ )
+ ) as table
+in
+ [
+ MachineProductivityMetricsSingleMachineType = MachineProductivityMetricsSingleMachineType,
+ MachineProductivityMetricsType = MachineProductivityMetricsType
+ ]
diff --git a/productivityMetrics/MachineProductivityMetrics.TableSchema.pqm b/productivityMetrics/MachineProductivityMetrics.TableSchema.pqm
new file mode 100644
index 0000000..6adc543
--- /dev/null
+++ b/productivityMetrics/MachineProductivityMetrics.TableSchema.pqm
@@ -0,0 +1,20 @@
+let
+ productivityMetricsTableSchema = type table [
+ start = datetimezone,
+ end = datetimezone,
+ machine = text,
+ availability_score = nullable number,
+ availability_time_loss = nullable number,
+ performance_score = nullable number,
+ performance_time_loss = nullable number,
+ quality_score = nullable number,
+ quality_time_loss = nullable number,
+ productivity_score = nullable number,
+ productivity_time_loss = nullable number,
+ quantity_scrap_value = nullable number,
+ quantity_scrap_unit = nullable text,
+ quantity_yield_value = nullable number,
+ quantity_yield_unit = nullable text
+ ]
+in
+ productivityMetricsTableSchema
diff --git a/productivityMetrics/MachineProductivityMetrics.pqm b/productivityMetrics/MachineProductivityMetrics.pqm
new file mode 100644
index 0000000..a618bc1
--- /dev/null
+++ b/productivityMetrics/MachineProductivityMetrics.pqm
@@ -0,0 +1,53 @@
+let
+ loadModule = (fileName as text) =>
+ let
+ binary = Extension.Contents(fileName), asText = Text.FromBinary(binary)
+ in
+ try
+ Expression.Evaluate(asText, #shared) catch (e) =>
+ error
+ [
+ Reason = "Extension.LoadModule Failure",
+ Message.Format = "Loading '#{0}' failed - '#{1}': '#{2}'",
+ Message.Parameters = {fileName, e[Reason], e[Message]},
+ Detail = [File = fileName, Error = e]
+ ],
+ TableSchema = loadModule("MachineProductivityMetrics.TableSchema.pqm"),
+ DateTimeRanges = loadModule("DateTimeRanges.pqm"),
+ PaginatedPostRequest = loadModule("ApiClient.pqm")[PaginatedPostRequest],
+ TransformProductivityMetrics = loadModule("ProductivityMetrics.Transform.pqm")[TransformProductivityMetrics],
+ FunctionType = loadModule("MachineProductivityMetrics.FunctionTypes.pqm")[MachineProductivityMetricsType],
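+ // For every machine uuid, POST the generated datetime ranges and accumulate the transformed rows into a single table.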
+ MachineProductivityMetrics = (machines as table, start as datetimezone, end as datetimezone, resolution as text) as table =>
+ let
+ machineUuids = Table.Column(machines, "uuid"),
+ dateTimeRanges =
+ if resolution = "hourly" then
+ DateTimeRanges[ToHourlyDateTimeRanges](start, end)
+ else if resolution = "daily" then
+ DateTimeRanges[ToDailyDateTimeRanges](start, end)
+ else if resolution = "weekly" then
+ DateTimeRanges[ToWeeklyDateTimeRanges](start, end)
+ else if resolution = "monthly" then
+ DateTimeRanges[ToMonthlyDateTimeRanges](start, end)
+ else
+ error "Invalid resolution. Please choose 'hourly', 'daily', 'weekly', or 'monthly'.",
+ tracedDateTimeRanges = Diagnostics.Trace(TraceLevel.Information, "Generated " & Text.From(List.Count(dateTimeRanges)) & " datetime ranges", () => dateTimeRanges, true),
+ accumulated = List.Accumulate(
+ machineUuids,
+ #table(TableSchema, {}),
+ (state, machineUuid) =>
+ let
+ responseData = PaginatedPostRequest(
+ "/machines/" & machineUuid & "/productivity-metrics", [datetime_ranges = dateTimeRanges]
+ ),
+ responseDataWithMachine = List.Transform(
+ responseData, (r) => Record.AddField(r, "machine", machineUuid)
+ )
+ in
+ Table.Combine({state, TransformProductivityMetrics(responseDataWithMachine)})
+ )
+ in
+ accumulated,
+ MachineProductivityMetricsCorrectType = Value.ReplaceType(MachineProductivityMetrics, FunctionType)
+in
+ MachineProductivityMetricsCorrectType
diff --git a/productivityMetrics/ProductivityMetrics.Transform.pqm b/productivityMetrics/ProductivityMetrics.Transform.pqm
new file mode 100644
index 0000000..31759bc
--- /dev/null
+++ b/productivityMetrics/ProductivityMetrics.Transform.pqm
@@ -0,0 +1,65 @@
+let
+ loadModule = (fileName as text) =>
+ let
+ binary = Extension.Contents(fileName), asText = Text.FromBinary(binary)
+ in
+ try
+ Expression.Evaluate(asText, #shared) catch (e) =>
+ error
+ [
+ Reason = "Extension.LoadModule Failure",
+ Message.Format = "Loading '#{0}' failed - '#{1}': '#{2}'",
+ Message.Parameters = {fileName, e[Reason], e[Message]},
+ Detail = [File = fileName, Error = e]
+ ],
+ TableSchema = loadModule("MachineProductivityMetrics.TableSchema.pqm"),
+ Table.ChangeType = loadModule("Table.ChangeType.pqm"),
+ TransformQuantityScrap = loadModule("Quantities.Transform.pqm")[TransformQuantityScrap],
+ TransformQuantityYield = loadModule("Quantities.Transform.pqm")[TransformQuantityYield],
+ TransformMetric = (tbl as table, fieldName as text) as table =>
+ Table.ExpandRecordColumn(
+ tbl, fieldName, {"score", "time_loss"}, {fieldName & "_score", fieldName & "_time_loss"}
+ ),
+ TransformAvailability = (tbl as table) as table => TransformMetric(tbl, "availability"),
+ TransformPerformance = (tbl as table) as table => TransformMetric(tbl, "performance"),
+ TransformQuality = (tbl as table) as table => TransformMetric(tbl, "quality"),
+ TransformProductivity = (tbl as table) as table => TransformMetric(tbl, "productivity"),
+ TransformProductivityMetrics = (productivityMetrics as list) as table =>
+ let
+ productivityMetricsTable = Table.FromList(
+ productivityMetrics, Splitter.SplitByNothing(), null, null, ExtraValues.Error
+ ),
+ namedTable = Value.ReplaceMetadata(
+ productivityMetricsTable, Value.Metadata(productivityMetricsTable) & [Name = "Productivity Metrics"]
+ ),
+ expandedTable = Table.ExpandRecordColumn(
+ namedTable,
+ "Column1",
+ {
+ "start",
+ "end",
+ "machine",
+ "availability",
+ "performance",
+ "quality",
+ "productivity",
+ "quantity_scrap",
+ "quantity_yield"
+ }
+ ),
+ expandedAvailability = TransformAvailability(expandedTable),
+ expandedPerformance = TransformPerformance(expandedAvailability),
+ expandedQuality = TransformQuality(expandedPerformance),
+ expandedProductivity = TransformProductivity(expandedQuality),
+ expandedQuantityScrap = TransformQuantityScrap(expandedProductivity),
+ expandedQuantityYield = TransformQuantityYield(expandedQuantityScrap)
+ in
+ Table.ChangeType(expandedQuantityYield, TableSchema)
+in
+ [
+ TransformProductivityMetrics = TransformProductivityMetrics,
+ TransformAvailability = TransformAvailability,
+ TransformPerformance = TransformPerformance,
+ TransformProductivity = TransformProductivity,
+ TransformQuality = TransformQuality
+ ]
diff --git a/products/Products.TableSchema.pqm b/products/Products.TableSchema.pqm
new file mode 100644
index 0000000..1c8259d
--- /dev/null
+++ b/products/Products.TableSchema.pqm
@@ -0,0 +1 @@
+let ProductsTableSchema = type table [uuid = text, external_id = text, name = text] in ProductsTableSchema
diff --git a/products/Products.Transform.pqm b/products/Products.Transform.pqm
new file mode 100644
index 0000000..5d06770
--- /dev/null
+++ b/products/Products.Transform.pqm
@@ -0,0 +1,25 @@
+let
+ loadModule = (fileName as text) =>
+ let
+ binary = Extension.Contents(fileName), asText = Text.FromBinary(binary)
+ in
+ try
+ Expression.Evaluate(asText, #shared) catch (e) =>
+ error
+ [
+ Reason = "Extension.LoadModule Failure",
+ Message.Format = "Loading '#{0}' failed - '#{1}': '#{2}'",
+ Message.Parameters = {fileName, e[Reason], e[Message]},
+ Detail = [File = fileName, Error = e]
+ ],
+ TableSchema = loadModule("Products.TableSchema.pqm"),
+ Table.ChangeType = loadModule("Table.ChangeType.pqm"),
+ TransformProducts = (products as list) as table =>
+ let
+ productsTable = Table.FromList(products, Splitter.SplitByNothing(), null, null, ExtraValues.Error),
+ namedTable = Value.ReplaceMetadata(productsTable, Value.Metadata(productsTable) & [Name = "Products"]),
+ expandedTable = Table.ExpandRecordColumn(namedTable, "Column1", {"uuid", "external_id", "name"})
+ in
+ Table.ChangeType(expandedTable, TableSchema)
+in
+ [TransformProducts = TransformProducts]
diff --git a/push-extension.ps1 b/push-extension.ps1
new file mode 100644
index 0000000..7db0860
--- /dev/null
+++ b/push-extension.ps1
@@ -0,0 +1,68 @@
+# PowerShell script to deploy custom connector using VS Code's Power Query SDK
+
+# Configuration
+$projectPath = $PSScriptRoot # Assumes the script is in the project directory
+$mezFileName = "enlyze-powerbi.mez" # Replace with your .mez file name
+$customConnectorsPath = "C:\Mac\Home\Documents\Microsoft Power BI Desktop\Custom Connectors" # Default Power BI custom connectors path
+$projectXmlPath = "enlyze.pq.proj"
+
+function Find-PowerBIDesktop {
+ # Search the common installation directories for PBIDesktop.exe
+ $commonDirs = @("$env:ProgramFiles", "C:\Program Files\WindowsApps", "${env:ProgramFiles(x86)}", "$env:LocalAppData")
+ foreach ($dir in $commonDirs) {
+ $found = Get-ChildItem -Path $dir -Recurse -ErrorAction SilentlyContinue |
+ Where-Object { $_.Name -eq "PBIDesktop.exe" } |
+ Select-Object -First 1 -ExpandProperty FullName
+ if ($found) {
+ return $found
+ }
+ }
+
+ return $null
+}
+
+
+msbuild $projectXmlPath -t:Clean
+msbuild $projectXmlPath
+
+# Find the .mez file
+Write-Host "Searching for the compiled .mez file..."
+$mezFile = Get-ChildItem -Path $projectPath -Recurse -Filter $mezFileName | Select-Object -First 1
+
+# Check if compilation was successful
+if ($null -eq $mezFile) {
+ Write-Host "Compilation failed or .mez file not found. Please check for errors in VS Code and try again."
+ exit 1
+}
+
+Write-Host ".mez file found at: $($mezFile.FullName)"
+
+# Copy .mez file to custom connectors directory
+Write-Host "Copying .mez file to custom connectors directory..."
+Copy-Item $mezFile.FullName -Destination $customConnectorsPath -Force
+
+# Check if copy was successful
+if ($?) {
+ Write-Host ".mez file successfully copied to custom connectors directory."
+} else {
+ Write-Host "Failed to copy .mez file. Please check permissions and try again."
+ exit 1
+}
+
+
+Write-Host "Closing Power BI..."
+Get-Process "PBIDesktop" -ErrorAction SilentlyContinue | Stop-Process -Force
+
+Write-Host "Starting Power BI..."
+$pbiPath = Find-PowerBIDesktop
+if ($null -eq $pbiPath) {
+ Write-Host "Power BI Desktop executable not found. Please start Power BI Desktop manually."
+} else {
+ Write-Host "Closing Power BI..."
+ Get-Process "PBIDesktop" -ErrorAction SilentlyContinue | Stop-Process -Force
+
+ Write-Host "Starting Power BI..."
+ Start-Process $pbiPath
+}
+
+Write-Host "Deployment complete!"
\ No newline at end of file
diff --git a/resources.resx b/resources.resx
new file mode 100644
index 0000000..254ea9c
--- /dev/null
+++ b/resources.resx
@@ -0,0 +1,129 @@
+<?xml version="1.0" encoding="utf-8"?>
+<root>
+ <resheader name="resmimetype">
+ <value>text/microsoft-resx</value>
+ </resheader>
+ <resheader name="version">
+ <value>2.0</value>
+ </resheader>
+ <resheader name="reader">
+ <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <resheader name="writer">
+ <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <data name="ButtonHelp" xml:space="preserve">
+ <value>Connect to ENLYZE</value>
+ </data>
+ <data name="ButtonTitle" xml:space="preserve">
+ <value>ENLYZE</value>
+ </data>
+ <data name="Label" xml:space="preserve">
+ <value>ENLYZE</value>
+ </data>
+</root>
diff --git a/resources/ENLYZE16.png b/resources/ENLYZE16.png
new file mode 100644
index 0000000..e971cc3
Binary files /dev/null and b/resources/ENLYZE16.png differ
diff --git a/resources/ENLYZE20.png b/resources/ENLYZE20.png
new file mode 100644
index 0000000..bc81693
Binary files /dev/null and b/resources/ENLYZE20.png differ
diff --git a/resources/ENLYZE24.png b/resources/ENLYZE24.png
new file mode 100644
index 0000000..31c2665
Binary files /dev/null and b/resources/ENLYZE24.png differ
diff --git a/resources/ENLYZE32.png b/resources/ENLYZE32.png
new file mode 100644
index 0000000..dd274de
Binary files /dev/null and b/resources/ENLYZE32.png differ
diff --git a/resources/ENLYZE40.png b/resources/ENLYZE40.png
new file mode 100644
index 0000000..97aca25
Binary files /dev/null and b/resources/ENLYZE40.png differ
diff --git a/resources/ENLYZE48.png b/resources/ENLYZE48.png
new file mode 100644
index 0000000..20c5de9
Binary files /dev/null and b/resources/ENLYZE48.png differ
diff --git a/resources/ENLYZE64.png b/resources/ENLYZE64.png
new file mode 100644
index 0000000..7660e0f
Binary files /dev/null and b/resources/ENLYZE64.png differ
diff --git a/resources/ENLYZE80.png b/resources/ENLYZE80.png
new file mode 100644
index 0000000..fb85c33
Binary files /dev/null and b/resources/ENLYZE80.png differ
diff --git a/shared/Quantities.Transform.pqm b/shared/Quantities.Transform.pqm
new file mode 100644
index 0000000..e6cd36b
--- /dev/null
+++ b/shared/Quantities.Transform.pqm
@@ -0,0 +1,12 @@
+let
+ TransformQuantity = (tbl as table, fieldName as text) as table =>
+ Table.ExpandRecordColumn(tbl, fieldName, {"unit", "value"}, {fieldName & "_unit", fieldName & "_value"}),
+ TransformQuantityScrap = (tbl as table) as table => TransformQuantity(tbl, "quantity_scrap"),
+ TransformQuantityYield = (tbl as table) as table => TransformQuantity(tbl, "quantity_yield"),
+ TransformQuantityTotal = (tbl as table) as table => TransformQuantity(tbl, "quantity_total")
+in
+ [
+ TransformQuantityScrap = TransformQuantityScrap,
+ TransformQuantityYield = TransformQuantityYield,
+ TransformQuantityTotal = TransformQuantityTotal
+ ]
diff --git a/sites/Sites.TableSchema.pqm b/sites/Sites.TableSchema.pqm
new file mode 100644
index 0000000..5b6f18e
--- /dev/null
+++ b/sites/Sites.TableSchema.pqm
@@ -0,0 +1 @@
+let SitesTableSchema = type table [uuid = text, name = text, address = text] in SitesTableSchema
diff --git a/sites/Sites.Transform.pqm b/sites/Sites.Transform.pqm
new file mode 100644
index 0000000..3b31da7
--- /dev/null
+++ b/sites/Sites.Transform.pqm
@@ -0,0 +1,25 @@
+let
+ loadModule = (fileName as text) =>
+ let
+ binary = Extension.Contents(fileName), asText = Text.FromBinary(binary)
+ in
+ try
+ Expression.Evaluate(asText, #shared) catch (e) =>
+ error
+ [
+ Reason = "Extension.LoadModule Failure",
+ Message.Format = "Loading '#{0}' failed - '#{1}': '#{2}'",
+ Message.Parameters = {fileName, e[Reason], e[Message]},
+ Detail = [File = fileName, Error = e]
+ ],
+ TableSchema = loadModule("Sites.TableSchema.pqm"),
+ Table.ChangeType = loadModule("Table.ChangeType.pqm"),
+ TransformSites = (sites as list) as table =>
+ let
+ sitesTable = Table.FromList(sites, Splitter.SplitByNothing(), null, null, ExtraValues.Error),
+ namedTable = Value.ReplaceMetadata(sitesTable, Value.Metadata(sitesTable) & [Name = "Sites"]),
+ expandedTable = Table.ExpandRecordColumn(namedTable, "Column1", {"uuid", "name", "address"})
+ in
+ Table.ChangeType(expandedTable, TableSchema)
+in
+ [TransformSites = TransformSites]
diff --git a/timeseriesData/TimeseriesData.FunctionTypes.pqm b/timeseriesData/TimeseriesData.FunctionTypes.pqm
new file mode 100644
index 0000000..516edde
--- /dev/null
+++ b/timeseriesData/TimeseriesData.FunctionTypes.pqm
@@ -0,0 +1,43 @@
+let
+ TimeseriesDataType = type function (
+ variables as (type table meta [Documentation.Label = "Variables"]),
+ start as datetimezone,
+ end as datetimezone,
+ resolution as (
+ type text meta [
+ Documentation.Label = "Resolution",
+ Documentation.Description = "Select a resolution.",
+ Documentation.AllowedValues = {"1m", "10m", "30m", "1h", "1d", "1w"}
+ ]
+ ),
+ resampling_method as (
+ type text meta [
+ Documentation.Label = "Resampling Method",
+ Documentation.Description = "Select a resampling method.",
+ Documentation.AllowedValues = {"first", "last", "max", "min", "count", "sum", "avg", "median"}
+ ]
+ )
+ ) as table,
+ TimeseriesSingleVariableType = type function (
+ start as datetimezone,
+ end as datetimezone,
+ resolution as (
+ type text meta [
+ Documentation.Label = "Resolution",
+ Documentation.Description = "Select a resolution.",
+ Documentation.AllowedValues = {"1m", "10m", "30m", "1h", "1d", "1w"}
+ ]
+ ),
+ resampling_method as (
+ type text meta [
+ Documentation.Label = "Resampling Method",
+ Documentation.Description = "Select a resampling method.",
+ Documentation.AllowedValues = {"first", "last", "max", "min", "count", "sum", "avg", "median"}
+ ]
+ )
+ ) as table
+in
+ [
+ TimeseriesDataType = TimeseriesDataType,
+ TimeseriesSingleVariableType = TimeseriesSingleVariableType
+ ]
diff --git a/timeseriesData/TimeseriesData.Transform.pqm b/timeseriesData/TimeseriesData.Transform.pqm
new file mode 100644
index 0000000..65edb6d
--- /dev/null
+++ b/timeseriesData/TimeseriesData.Transform.pqm
@@ -0,0 +1,23 @@
+let
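+ // Each page carries parallel "columns" and "records" lists; zip them into records, build a table, and parse "time" as datetimezone.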
+ TransformTimeseriesData = (timeseriesDataPages as list) as table =>
+ let
+ transformedPages = List.Transform(
+ timeseriesDataPages,
+ (page) =>
+ let
+ columns = page[columns],
+ records = page[records],
+ transformedRecords = List.Transform(records, each Record.FromList(_, columns)),
+ pageTable = Table.FromRecords(transformedRecords)
+ in
+ Table.TransformColumns(
+ pageTable, {{"time", each DateTimeZone.FromText(_), type datetimezone}}
+ )
+ ),
+ combinedTable = Table.Combine(transformedPages)
+ in
+ combinedTable
+in
+ [
+ TransformTimeseriesData = TransformTimeseriesData
+ ]
diff --git a/timeseriesData/TimeseriesData.pqm b/timeseriesData/TimeseriesData.pqm
new file mode 100644
index 0000000..4a93639
--- /dev/null
+++ b/timeseriesData/TimeseriesData.pqm
@@ -0,0 +1,76 @@
+let
+ loadModule = (fileName as text) =>
+ let
+ binary = Extension.Contents(fileName), asText = Text.FromBinary(binary)
+ in
+ try
+ Expression.Evaluate(asText, #shared) catch (e) =>
+ error
+ [
+ Reason = "Extension.LoadModule Failure",
+ Message.Format = "Loading '#{0}' failed - '#{1}': '#{2}'",
+ Message.Parameters = {fileName, e[Reason], e[Message]},
+ Detail = [File = fileName, Error = e]
+ ],
+ PaginatedPostRequest = loadModule("ApiClient.pqm")[PaginatedPostRequest],
+ TransformTimeseriesData = loadModule("TimeseriesData.Transforms.pqm")[TransformTimeseriesData],
+ TimeseriesDataType = loadModule("TimeseriesData.FunctionTypes.pqm")[TimeseriesDataType],
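+ // Variables are grouped and fetched per machine; the API accepts at most 100 variables per request.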
+ TimeseriesData = (
+ variables as table, start as datetimezone, end as datetimezone, resolution as text, resampling_method as text
+ ) as table =>
+ let
+ resampling_interval_seconds =
+ if resolution = "1m" then
+ 60
+ else if resolution = "10m" then
+ 600
+ else if resolution = "30m " then
+ 1800
+ else if resolution = "1h" then
+ 3600
+ else if resolution = "1d" then
+ 86400
+ else
+ 604800,
+ variablesByMachine = Table.Group(
+ variables, {"machine"}, {{"GroupedVariables", each _, type table [uuid = text, machine = text]}}
+ ),
+ bodyBase = [start = start, end = end, resampling_interval = resampling_interval_seconds],
+ fetchTimeseriesForMachine = (variablesByMachine as table, machineUuid as text) as table =>
+ let
+ variableCount = Table.RowCount(variablesByMachine),
+ // the limit check must feed into the request body; an unused let binding is never evaluated and would never raise
+ checkedVariables =
+ if variableCount > 100 then
+ error "The number of variables for machine " & machineUuid & " exceeds the limit of 100."
+ else
+ variablesByMachine,
+ requestBody = Record.Combine(
+ {
+ bodyBase,
+ [
+ machine = machineUuid,
+ variables = List.Transform(
+ Table.ToRecords(checkedVariables),
+ each [uuid = _[uuid], resampling_method = resampling_method]
+ )
+ ]
+ }
+ ),
+ timeseriesDataPages = PaginatedPostRequest("/timeseries", requestBody, null),
+ transformedData = TransformTimeseriesData(timeseriesDataPages)
+ in
+ transformedData,
+ resultRecord = Record.FromList(
+ Table.TransformRows(
+ variablesByMachine, each fetchTimeseriesForMachine([GroupedVariables], [machine])
+ ),
+ variablesByMachine[machine]
+ )
+ in
+ Table.FromRecords({resultRecord}),
+ TimeseriesDataCorrectType = Value.ReplaceType(TimeseriesData, TimeseriesDataType)
+in
+ TimeseriesDataCorrectType
diff --git a/variables/Variables.TableSchema.pqm b/variables/Variables.TableSchema.pqm
new file mode 100644
index 0000000..0c1d280
--- /dev/null
+++ b/variables/Variables.TableSchema.pqm
@@ -0,0 +1,13 @@
+let
+ VariablesTableSchema = type table [
+ display_name = nullable text,
+ #"type" = text,
+ machine = text,
+ uuid = text,
+ unit = nullable text,
+ data_type = text,
+ scaling_factor = nullable number,
+ #"timeseries_data" = any
+ ]
+in
+ VariablesTableSchema
diff --git a/variables/Variables.Transform.pqm b/variables/Variables.Transform.pqm
new file mode 100644
index 0000000..26de489
--- /dev/null
+++ b/variables/Variables.Transform.pqm
@@ -0,0 +1,51 @@
+let
+ loadModule = (fileName as text) =>
+ let
+ binary = Extension.Contents(fileName), asText = Text.FromBinary(binary)
+ in
+ try
+ Expression.Evaluate(asText, #shared) catch (e) =>
+ error
+ [
+ Reason = "Extension.LoadModule Failure",
+ Message.Format = "Loading '#{0}' failed - '#{1}': '#{2}'",
+ Message.Parameters = {fileName, e[Reason], e[Message]},
+ Detail = [File = fileName, Error = e]
+ ],
+ TableSchema = loadModule("Variables.TableSchema.pqm"),
+ Table.ChangeType = loadModule("Table.ChangeType.pqm"),
+ TimeseriesData = loadModule("TimeseriesData.pqm"),
+ TimeseriesSingleVariableType = loadModule("TimeseriesData.FunctionTypes.pqm")[TimeseriesSingleVariableType],
+ TransformVariables = (variables as list) as table =>
+ let
+ variablesTable = Table.FromList(variables, Splitter.SplitByNothing(), null, null, ExtraValues.Error),
+ expandedTable = Table.ExpandRecordColumn(
+ variablesTable,
+ "Column1",
+ {"uuid", "display_name", "machine", "unit", "type", "data_type", "scaling_factor"}
+ ),
+ expandedDisplayName = Table.TransformColumns(
+ expandedTable, {{"display_name", each if _ = null then "" else _, type text}}
+ ),
+ expandedUnit = Table.TransformColumns(
+ expandedDisplayName, {{"unit", each if _ = null then "" else _, type text}}
+ ),
+ expandedScalingFactor = Table.TransformColumns(
+ expandedUnit, {{"scaling_factor", each if _ = null then null else _, type number}}
+ ),
+ variablesWithTimeseries = Table.AddColumn(
+ expandedScalingFactor,
+ "timeseries_data",
+ (row) =>
+ let
+ func = (start as datetimezone, end as datetimezone, resolution as text, resampling_method as text) =>
+ TimeseriesData(Table.FromRecords({row}), start, end, resolution, resampling_method)
+ in
+ Value.ReplaceType(func, TimeseriesSingleVariableType)
+ )
+ in
+ Table.ChangeType(variablesWithTimeseries, TableSchema)
+in
+ [
+ TransformVariables = TransformVariables
+ ]