Commit
Merge branch 'devs/peiwen/fix_chatgroup_run' of https://github.com/microsoft/promptflow into devs/peiwen/fix_chatgroup_run
chw-microsoft committed May 11, 2024
2 parents 3abdc21 + 086c569 commit 47922fe
Showing 146 changed files with 6,097 additions and 980 deletions.
9 changes: 8 additions & 1 deletion .cspell.json
@@ -49,7 +49,8 @@
".github/workflows/**",
".github/actions/**",
".github/pipelines/**",
".github/CODEOWNERS"
".github/CODEOWNERS",
"src/promptflow-evals/tests/**"
],
"words": [
"aoai",
@@ -221,10 +222,16 @@
"mpnet",
"wargs",
"dcid",
"aiohttp",
"endofprompt",
"tkey",
"tparam",
"ncols",
"piezo",
"Piezo",
"cmpop",
"omap",
"Machinal",
"azureopenaimodelconfiguration",
"openaimodelconfiguration"
],
1 change: 1 addition & 0 deletions .github/workflows/build_doc_ci.yml
@@ -15,6 +15,7 @@ on:
- 'src/promptflow-core/promptflow/**'
- 'src/promptflow-devkit/promptflow/**'
- 'src/promptflow-azure/promptflow/**'
- 'src/promptflow-rag/promptflow/**'

env:
packageSetupType: promptflow_with_extra
29 changes: 18 additions & 11 deletions .github/workflows/build_msi_installer.yml
@@ -28,6 +28,9 @@ permissions:
env:
packageSetupType: promptflow_with_extra
testWorkingDirectory: src/promptflow
AZURE_ACCOUNT_NAME: promptflowartifact
AZURE_MSI_CONTAINER_NAME: msi-installer
AZURE_PORTABLE_CONTAINER_NAME: portable-installer

jobs:
build_msi_installer:
@@ -187,7 +190,7 @@ jobs:
- name: Download JSON file from Azure Blob Storage
id: download-json
run: |
az storage blob download --account-name promptflowartifact --container-name msi-installer --name latest_version.json --file downloaded_version.json
az storage blob download --account-name ${{ env.AZURE_ACCOUNT_NAME }} --container-name ${{ env.AZURE_MSI_CONTAINER_NAME }} --name latest_version.json --file downloaded_version.json --auth-mode login
$downloaded_version = (Get-Content downloaded_version.json | ConvertFrom-Json).promptflow
echo "::set-output name=downloaded_version::$downloaded_version"
@@ -203,28 +206,32 @@
$jsonContent | Out-File -FilePath latest_version.json -Encoding UTF8
Write-Output "Created latest_version.json with version: $version"
az storage blob upload --account-name promptflowartifact --container-name msi-installer --file "latest_version.json" --name "latest_version.json" --overwrite
az storage blob upload --account-name ${{ env.AZURE_ACCOUNT_NAME }} --container-name ${{ env.AZURE_MSI_CONTAINER_NAME }} --file "latest_version.json" --name "latest_version.json" --overwrite --auth-mode login
} else {
Write-Output "skip uploading since version input isn't greater than latest version or does not start with '1.'"
}
- name: Upload to Azure Storage
run: |
function Upload-File($filePath, $blobName, $containerName) {
az storage blob upload --account-name ${{ env.AZURE_ACCOUNT_NAME }} --container-name $containerName --file $filePath --name $blobName --overwrite --auth-mode login
}
$msi_files = Get-ChildItem -Path 'scripts/installer/windows/out/' -Filter *.msi
foreach ($msi_file in $msi_files) {
if ($env:INPUT_UPLOADASLATEST -ieq 'True') {
az storage blob upload --account-name promptflowartifact --container-name msi-installer --file "scripts/installer/windows/out/$($msi_file.Name)" --name "promptflow.msi" --overwrite
az storage blob copy start --account-name promptflowartifact --destination-container msi-installer --destination-blob "$($msi_file.Name)" --source-container msi-installer --source-blob "promptflow.msi"
} else {
az storage blob upload --account-name promptflowartifact --container-name msi-installer --file "scripts/installer/windows/out/$($msi_file.Name)" --name "$($msi_file.Name)" --overwrite
}
if ($env:INPUT_UPLOADASLATEST -ieq 'True') {
Upload-File "scripts/installer/windows/out/$($msi_file.Name)" "promptflow.msi" ${{ env.AZURE_MSI_CONTAINER_NAME }}
az storage blob copy start --account-name ${{ env.AZURE_ACCOUNT_NAME }} --destination-container ${{ env.AZURE_MSI_CONTAINER_NAME }} --destination-blob "$($msi_file.Name)" --source-container ${{ env.AZURE_MSI_CONTAINER_NAME }} --source-blob "promptflow.msi" --auth-mode login
} else {
Upload-File "scripts/installer/windows/out/$($msi_file.Name)" "$($msi_file.Name)" ${{ env.AZURE_MSI_CONTAINER_NAME }}
}
}
# Upload zip file
if ($env:INPUT_UPLOADASLATEST -ieq 'True') {
az storage blob upload --account-name promptflowartifact --container-name portable-installer --file "promptflow-${{ steps.get-version.outputs.version }}.zip" --name "promptflow.zip" --overwrite
az storage blob copy start --account-name promptflowartifact --destination-container portable-installer --destination-blob "promptflow-${{ steps.get-version.outputs.version }}.zip" --source-container portable-installer --source-blob "promptflow.zip"
Upload-File "promptflow-${{ steps.get-version.outputs.version }}.zip" "promptflow.zip" ${{ env.AZURE_PORTABLE_CONTAINER_NAME }}
az storage blob copy start --account-name ${{ env.AZURE_ACCOUNT_NAME }} --destination-container ${{ env.AZURE_PORTABLE_CONTAINER_NAME }} --destination-blob "promptflow-${{ steps.get-version.outputs.version }}.zip" --source-container ${{ env.AZURE_PORTABLE_CONTAINER_NAME }} --source-blob "promptflow.zip" --auth-mode login
} else {
az storage blob upload --account-name promptflowartifact --container-name portable-installer --file "promptflow-${{ steps.get-version.outputs.version }}.zip" --name "promptflow-${{ steps.get-version.outputs.version }}.zip" --overwrite
Upload-File "promptflow-${{ steps.get-version.outputs.version }}.zip" "promptflow-${{ steps.get-version.outputs.version }}.zip" ${{ env.AZURE_PORTABLE_CONTAINER_NAME }}
}
env:
INPUT_UPLOADASLATEST: ${{ github.event.inputs.uploadAsLatest }}
5 changes: 4 additions & 1 deletion .github/workflows/promptflow-evals-e2e-test.yml
@@ -32,7 +32,10 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macos-13]
python-version: ['3.8', '3.9', '3.10', '3.11']
# TODO: Encounter hash mismatch for ubuntu-latest and 3.9 combination during installing promptflow-evals package
# https://github.com/microsoft/promptflow/actions/runs/9009397933/job/24753518853?pr=3158
# Add 3.9 back after we figure out the issue
python-version: ['3.8', '3.10', '3.11']
fail-fast: false
# snok/install-poetry need this to support Windows
defaults:
2 changes: 1 addition & 1 deletion .github/workflows/promptflow-evals-unit-test.yml
@@ -104,4 +104,4 @@ jobs:
format: markdown
hide_complexity: true
output: both
thresholds: 40 60
thresholds: 40 60
1 change: 1 addition & 0 deletions .github/workflows/publish_doc.yml
@@ -16,6 +16,7 @@ on:
- 'src/promptflow-core/promptflow/**'
- 'src/promptflow-devkit/promptflow/**'
- 'src/promptflow-azure/promptflow/**'
- 'src/promptflow-rag/promptflow/**'

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
1 change: 1 addition & 0 deletions .github/workflows/tools_tests.yml
@@ -27,6 +27,7 @@ jobs:
strategy:
fail-fast: false
runs-on: ubuntu-latest
environment: Testing
timeout-minutes: 30
env:
DEPENDENCY_SOURCE_MODE: ${{ secrets.DEPENDENCY_SOURCE_MODE }}
2 changes: 1 addition & 1 deletion README.md
@@ -41,7 +41,7 @@ To get started quickly, you can use a pre-built development environment. **Click

If you want to get started in your local environment, first install the packages:

Ensure you have a python environment, `python=3.9` is recommended.
Ensure you have a python environment, `python>=3.9, <=3.11` is recommended.

```sh
pip install promptflow promptflow-tools
96 changes: 96 additions & 0 deletions docs/cloud/azureai/tracing/index.md
@@ -0,0 +1,96 @@
# Tracing from local to cloud

:::{admonition} Experimental feature
This is an experimental feature, and may change at any time. Learn [more](../../../how-to-guides/faq.md#stable-vs-experimental).
:::

The prompt flow [tracing feature](../../../how-to-guides/tracing/index.md) enables users to trace LLM calls, functions, and even LLM frameworks. In addition, with `promptflow[azure]` installed, prompt flow can log traces to an Azure ML workspace or Azure AI project, which makes it possible to share traces with your team members.

## Installing the package

```console
pip install "promptflow[azure]>=1.11.0"
```

## Set cloud destination

To log traces to the cloud, you first need an [Azure ML workspace](https://learn.microsoft.com/en-us/azure/machine-learning/concept-workspace?view=azureml-api-2) or [Azure AI project](https://learn.microsoft.com/en-us/azure/ai-studio/how-to/create-projects). Then you can set the destination. Make sure you have logged in to the Azure CLI (`az login`; refer to the [Azure CLI doc](https://learn.microsoft.com/en-us/cli/azure/) for more information) before executing the CLI command below:

```console
pf config set trace.destination=azureml://subscriptions/<subscription-id>/resourcegroups/<resource-group-name>/providers/Microsoft.MachineLearningServices/workspaces/<workspace-or-project-name>
```

Fill in your own subscription ID, resource group name, and workspace or project name, and everything is ready. When you make LLM calls, run an LLM application, or execute your flow with `pf flow test` or `pf run create`, you will see an Azure portal URL in the console:

```console
You can view the traces in cloud from Azure portal: https://ml.azure.com/prompts/trace/run/<run-name>/details?wsid=/subscriptions/<subscription-id>/resourceGroups/<resource-group-name>/providers/Microsoft.MachineLearningServices/workspaces/<workspace-or-project-name>
```
Click the link to view the traces in the Azure portal, and feel free to share it with your team members.

![trace-ui-portal](../../../media/trace/trace-ui-portal-demo.gif)
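
For a quick end-to-end check, here is a minimal Python sketch; the function and collection names are illustrative, and it assumes `promptflow[azure]` is installed and the destination above has already been set:

```python
from promptflow.tracing import start_trace, trace

# start_trace() picks up the configured trace destination, so the spans below
# are also logged to the Azure ML workspace / AI project set via `pf config set`.
start_trace(collection="cloud-tracing-demo")  # collection name is illustrative


@trace
def greet(name: str) -> str:
    # Functions decorated with @trace appear as spans in the trace UI;
    # LLM calls made inside would show up as child spans.
    return f"Hello, {name}!"


if __name__ == "__main__":
    print(greet("prompt flow"))
```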

## Storage

Traces in an Azure ML workspace/AI project are persisted in an [Azure Cosmos DB](https://learn.microsoft.com/en-us/azure/cosmos-db/) associated with the workspace/project. The database is set up automatically the first time you execute the CLI command `pf config set trace.destination` for a workspace/project.

## Set different destination

Prompt flow also supports logging traces from different flows to different workspaces/projects. To configure this, set `trace.destination` to `azureml` via the CLI command:

```console
pf config set trace.destination=azureml
```

Then prepare configuration files pointing to the different workspaces/projects. Prompt flow currently recognizes the [workspace configuration file](https://learn.microsoft.com/en-us/azure/machine-learning/how-to-configure-environment?view=azureml-api-2#local-and-dsvm-only-create-a-workspace-configuration-file) `config.json`; you can create one manually or download it from the Azure portal. This JSON file contains all the required information about a workspace/project:

```json
{
"subscription_id": "<subscription-id>",
"resource_group": "<resource-group-name>",
"workspace_name": "<workspace-name>"
}
```

When `trace.destination` is set to `azureml`, prompt flow searches for a `config.json`, starting from `.azureml` under the flow folder and then walking up the parent folders until it finds one. If no `config.json` is found, an error is raised. It is recommended to place `config.json` in a folder named `.azureml` inside your flow directory so that prompt flow can find it easily.

Below is an example folder structure:

```
flows
├── flow1
│ ├── .azureml
│ │ └── config.json # workspace/project A
│ ├── flow.flex.yaml
│ ├── llm.py
│ ├── data.jsonl
│ ...
├── flow2
│ ├── .azureml
│ │ └── config.json # workspace/project B
│ ├── flow.dag.yaml
│ ├── hello.py
│ ├── data.jsonl
└── ...
```

Then, when `flow1` is executed, its traces are logged to workspace/project A, while `flow2`'s traces are logged to workspace/project B.
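
The lookup described above can be pictured roughly with the following sketch; it illustrates the documented behavior and is not the actual promptflow implementation:

```python
from pathlib import Path
from typing import Optional


def find_workspace_config(flow_dir: str) -> Optional[Path]:
    """Look for a workspace config.json, preferring <dir>/.azureml/config.json,
    walking up the parent directories until one is found."""
    current = Path(flow_dir).resolve()
    for directory in (current, *current.parents):
        for candidate in (directory / ".azureml" / "config.json", directory / "config.json"):
            if candidate.is_file():
                return candidate
    return None  # prompt flow raises an error in this case


print(find_workspace_config("flows/flow1"))  # -> .../flows/flow1/.azureml/config.json
```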

## Disable logging to cloud

When you want to stop logging traces to the cloud, switch back to local with the CLI command below:

```console
pf config set trace.destination=local
```

`local` is the default value for `trace.destination`; with this value no traces are logged to Azure, but traces are still logged locally.

## Disable tracing feature

Use the CLI command below to disable the prompt flow tracing feature:

```console
pf config set trace.destination=none
```

After this, no traces will be logged, either locally or to the cloud.
6 changes: 6 additions & 0 deletions docs/cloud/index.md
@@ -27,6 +27,12 @@ In prompt flow, You can develop your flow locally and then seamlessly transition

For more resources on Azure AI, visit the cloud documentation site: [Build AI solutions with prompt flow](https://learn.microsoft.com/en-us/azure/machine-learning/prompt-flow/get-started-prompt-flow?view=azureml-api-2).

```{toctree}
:caption: Tracing
:maxdepth: 2
azureai/tracing/index
```

```{toctree}
:caption: Flow
:maxdepth: 2
Binary file added docs/media/trace/trace-ui-portal-demo.gif
33 changes: 31 additions & 2 deletions docs/reference/pf-command-reference.md
@@ -102,6 +102,7 @@ pf flow test --flow
[--interactive]
[--verbose]
[--ui]
[--collection]
```

#### Examples
@@ -160,6 +161,12 @@ Chat in the chat window.
pf flow test --flow <path-to-flow-directory> --ui
```

Test the flow while logging traces to a specific collection.

```bash
pf flow test --flow <path-to-flow-directory> --collection <collection>
```
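
When tracing Python code directly (outside `pf flow test`), the same collection can be set programmatically; a minimal sketch, where the collection name is your own:

```python
from promptflow.tracing import start_trace

# Spans emitted after this call are grouped under the given collection,
# matching the effect of `pf flow test --collection <collection>`.
start_trace(collection="my-collection")  # "my-collection" is illustrative
```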

#### Required Parameter

`--flow`
@@ -886,10 +893,32 @@

#### Examples

Config connection provider to azure workspace for current user.
**Connection provider**

Set connection provider to Azure ML workspace or Azure AI project for current user.

```bash
pf config set connection.provider="azureml://subscriptions/<subscription-id>/resourceGroups/<resource-group-name>/providers/Microsoft.MachineLearningServices/workspaces/<workspace-or-project-name>"
```

**Tracing**

Set trace destination to Azure ML workspace or Azure AI project.

```bash
pf config set trace.destination="azureml://subscriptions/<subscription-id>/resourceGroups/<resource-group-name>/providers/Microsoft.MachineLearningServices/workspaces/<workspace-or-project-name>"
```

Only log traces to local.

```bash
pf config set trace.destination="local"
```

Disable tracing feature.

```bash
pf config set connection.provider="azureml://subscriptions/<your-subscription>/resourceGroups/<your-resourcegroup>/providers/Microsoft.MachineLearningServices/workspaces/<your-workspace>"
pf config set trace.destination="none"
```

### pf config show
2 changes: 1 addition & 1 deletion examples/flex-flows/chat-stream/flow.flex.yaml
@@ -2,4 +2,4 @@ $schema: https://azuremlschemas.azureedge.net/promptflow/latest/Flow.schema.json
entry: flow:ChatFlow
environment:
# image: mcr.microsoft.com/azureml/promptflow/promptflow-python
python_requirements_txt: requirements.txt
python_requirements_txt: requirements.txt
2 changes: 1 addition & 1 deletion scripts/dev-setup/main.py
@@ -104,7 +104,7 @@ def install_pkg_editable(pkg: str, verbose: bool, is_vscode: bool = False) -> No
# we should be able to remove this after we fully deprecate promptflow in local development
if is_vscode:
with open(pkg_working_dir / "promptflow" / "__init__.py", mode="w", encoding="utf-8") as f:
f.write("")
f.write("__path__ = __import__('pkgutil').extend_path(__path__, __name__)\n")


@dataclass
9 changes: 9 additions & 0 deletions src/promptflow-azure/CHANGELOG.md
@@ -1,5 +1,14 @@
# promptflow-azure package

## v1.11.0 (Upcoming)

### Improvements
- Refine trace Cosmos DB setup process to print setup status during the process, and display the error message from the service when setup fails.
- Return the secrets in the connection object by default to improve the flex flow experience.
  - Behaviors not changed: `pfazure connection` commands will scrub secrets.
  - New behavior: the connection object returned by `client.connection.get` will have real secrets. `print(connection_obj)` will scrub those secrets, while `print(connection_obj.api_key)` or `print(connection_obj.secrets)` will print the REAL secrets.
  - Workspace listsecrets permission is required to get the secrets. Call `client.connection.get(name, with_secrets=False)` if you want to get the connection without the secrets and without the listsecrets permission.
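
A hedged sketch of the new behavior, using the accessor named in this entry; the client construction and connection name below are illustrative:

```python
from azure.identity import DefaultAzureCredential
from promptflow.azure import PFClient

# Workspace identifiers are placeholders.
client = PFClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    resource_group_name="<resource-group-name>",
    workspace_name="<workspace-name>",
)

conn = client.connection.get("my_connection")  # requires workspace listsecrets permission
print(conn)            # printed representation still scrubs secret values
print(conn.api_key)    # prints the real secret

# Without the listsecrets permission, request the connection without secrets:
conn_no_secrets = client.connection.get("my_connection", with_secrets=False)
```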

## v1.10.0 (2024.04.26)

## v1.9.0 (2024.04.17)
8 changes: 8 additions & 0 deletions src/promptflow-azure/promptflow/azure/_constants/_trace.py
@@ -0,0 +1,8 @@
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------

COSMOS_DB_SETUP_POLL_TIMEOUT_SECOND = 600
COSMOS_DB_SETUP_POLL_INTERVAL_SECOND = 30
COSMOS_DB_SETUP_POLL_PRINT_INTERVAL_SECOND = 30
COSMOS_DB_SETUP_RESOURCE_TYPE = "HOBO"
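
These constants drive the polling added for the trace Cosmos DB setup. A hypothetical sketch of how such timeout/interval constants are typically consumed follows; the `setup_finished` callback is illustrative, not a promptflow API:

```python
import time

COSMOS_DB_SETUP_POLL_TIMEOUT_SECOND = 600
COSMOS_DB_SETUP_POLL_INTERVAL_SECOND = 30
COSMOS_DB_SETUP_POLL_PRINT_INTERVAL_SECOND = 30


def wait_for_cosmos_db_setup(setup_finished) -> bool:
    """Poll `setup_finished()` until it returns True or the timeout elapses,
    printing a status line at the configured print interval."""
    start = time.time()
    last_print = start
    while time.time() - start < COSMOS_DB_SETUP_POLL_TIMEOUT_SECOND:
        if setup_finished():
            return True
        now = time.time()
        if now - last_print >= COSMOS_DB_SETUP_POLL_PRINT_INTERVAL_SECOND:
            print(f"Cosmos DB setup still in progress ({int(now - start)}s elapsed)...")
            last_print = now
        time.sleep(COSMOS_DB_SETUP_POLL_INTERVAL_SECOND)
    return False
```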

0 comments on commit 47922fe
