
Commit

Merge branch 'add_tn_build' into 'master'
Add tn build

See merge request deep-learning/tensornet!8
gzm55 committed May 30, 2024
2 parents d9890f9 + 996b03c commit ee598dd
Showing 10 changed files with 328 additions and 27 deletions.
71 changes: 71 additions & 0 deletions .github/workflows/python.yml
@@ -0,0 +1,71 @@
name: Build Tensornet

on:
  push:
    branches:
      - '**' # matches every branch
    tags:
      - 'v[0-9]+'
      - 'v[0-9]+\.[0-9]+'
      - 'v[0-9]+\.[0-9]+\.[0-9]+'
  pull_request:

jobs:
  tn_build:
    runs-on: ubuntu-latest
    steps:
      - name: checkout repository
        uses: actions/checkout@v4

      - uses: mamba-org/setup-micromamba@v1
        with:
          micromamba-version: '1.5.8-0'
          environment-file: config/tn_build.yaml
          init-shell: bash
          cache-downloads: true
          post-cleanup: 'none'
      - name: Run custom command in micromamba environment
        run: ./manager build
        shell: micromamba-shell {0}

      - name: Create setup dist
        run: ./manager create_dist
        shell: micromamba-shell {0}

      - name: Store wheels
        uses: actions/upload-artifact@v4
        with:
          path: dist/
          retention-days: 7

  publish-to-test-pypi:
    name: Upload to test-pypi
    runs-on: ubuntu-latest
    needs:
      - tn_build
    permissions:
      id-token: write # mandatory for trusted publishing
    steps:
      - name: Download wheels
        uses: actions/download-artifact@v4
        with:
          path: dist/
      - name: Publish wheels to test-PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          repository-url: https://test.pypi.org/legacy/

  publish-to-pypi:
    name: Upload to pypi
    if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes
    runs-on: ubuntu-latest
    needs:
      - tn_build
    permissions:
      id-token: write # mandatory for trusted publishing
    steps:
      - name: Download wheels
        uses: actions/download-artifact@v4
        with:
          path: dist/
      - name: Publish wheels to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
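A note on the triggers: publish-to-pypi only runs when the pushed ref is a tag, and the workflow itself is only triggered for tags matching the v[0-9]+ patterns above. A release would therefore be cut with something like the following (the version number is purely illustrative):

    git tag v1.0.0
    git push origin v1.0.0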

32 changes: 32 additions & 0 deletions .gitlab-ci.yml
@@ -0,0 +1,32 @@
tn_build:
  stage: build
  tags:
    - image-unlocked
  image:
    name: r.addops.soft.360.cn/sycp-container/centos7.2:base

  variables:
    NEED_PREPARE_ENV: "true"

  before_script:
    - mkdir -p ${HOME}/.config/pip
    - |
      cat > "${HOME}/.config/pip/pip.conf" <<END
      [global]
      index-url = ${NEXUS3_PYPI_REPO}
      END
  script:
    - sed -i "s|https://github.com|${NEXUS3_HEADER}/github.com|g" WORKSPACE
    - ./manager build
    - ./manager deploy
  cache:
    - key: cache-$CI_COMMIT_REF_NAME
      paths:
        - /root/.cache/bazel/_bazel_root/cache
        - /root/.cache/bazel/_bazel_root/install
        - /root/micromamba/pkgs
  when: manual
  #only:
  #  - tags
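For a local sanity check of the pip mirror setup used in before_script, the same config can be written by hand; the URL below is only a placeholder for whatever ${NEXUS3_PYPI_REPO} holds in CI:

    export NEXUS3_PYPI_REPO=https://nexus.example.com/repository/pypi/simple   # hypothetical mirror URL
    mkdir -p "${HOME}/.config/pip"
    printf '[global]\nindex-url = %s\n' "${NEXUS3_PYPI_REPO}" > "${HOME}/.config/pip/pip.conf"
    pip config list   # should report global.index-url pointing at the mirror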

29 changes: 9 additions & 20 deletions WORKSPACE
@@ -12,17 +12,6 @@ http_archive(
    ],
)

-#http_archive(
-#    name = "org_tensorflow",
-#    sha256 = "2595a5c401521f20a2734c4e5d54120996f8391f00bb62a57267d930bce95350",
-#    strip_prefix = "tensorflow-2.3.0",
-#    urls = [
-#        "https://github.com/tensorflow/tensorflow/archive/v2.3.0.tar.gz",
-#    ],
-#)
-
-# copy from @org_tensorflow/WORKSPACE
-# TensorFlow build depends on these dependencies.
http_archive(
    name = "io_bazel_rules_closure",
    sha256 = "5b00383d08dd71f28503736db0500b6fb4dda47489ff5fc6bed42557c07c6ba9",
@@ -36,10 +25,9 @@
http_archive(
    name = "brpc",
    urls = [
-        "https://github.com/apache/incubator-brpc/archive/0.9.7.tar.gz"
+        "https://github.com/apache/brpc/archive/0.9.7.tar.gz"
    ],
    sha256 = "722cd342baf3b05189ca78ecf6c56ea6ffec22e62fc2938335e4e5bab545a49c",
-    strip_prefix = "incubator-brpc-0.9.7",
+    strip_prefix = "brpc-0.9.7",
)

# depend by brpc
@@ -50,12 +38,13 @@
url = "https://github.com/google/leveldb/archive/a53934a3ae1244679f812d998a4f16f2c7f309a6.tar.gz"
)

git_repository(
name = "com_github_nelhage_rules_boost",
commit = "fe9a0795e909f10f2bfb6bfa4a51e66641e36557",
remote = "https://github.com/nelhage/rules_boost",
shallow_since = "1570056263 -0700",
)
http_archive(
name = "com_github_nelhage_rules_boost",
urls = [
"https://github.com/nelhage/rules_boost/archive/fe9a0795e909f10f2bfb6bfa4a51e66641e36557.tar.gz",
],
strip_prefix = "rules_boost-fe9a0795e909f10f2bfb6bfa4a51e66641e36557",
)

load("@com_github_nelhage_rules_boost//:boost/boost.bzl", "boost_deps")
boost_deps()
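Unlike the other archives in this file, the new rules_boost http_archive is not pinned with a sha256. If pinning is wanted later, the checksum could be computed along these lines (a sketch; the resulting hash is not reproduced here):

    curl -L -o rules_boost.tar.gz \
        https://github.com/nelhage/rules_boost/archive/fe9a0795e909f10f2bfb6bfa4a51e66641e36557.tar.gz
    sha256sum rules_boost.tar.gz   # paste the printed value into a sha256 = "..." attribute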
23 changes: 23 additions & 0 deletions config/tn_build.yaml
@@ -0,0 +1,23 @@
name: tn_build
channels:
  - conda-forge
dependencies:
  - python=3.7
  - nomkl
  - bazel==3.1.0
  - openmpi==4.1.3
  - openssl==1.1.1t
  - libxcrypt==4.4.28
  - gcc==10.3.0
  - gxx==10.3.0
  - libstdcxx-devel_linux-64==10.3.0
  - openjdk==8.0.382
  - patch
  - pip
  - pip:
      - tensorflow==2.2.0
      - protobuf<3.21
      - grpcio<1.47 # Only for CentOS 6
      - h5py<3.8 # Only for CentOS 6
      - twine
      - wheel
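This is the same environment file consumed by the GitHub workflow and by ./manager prepare_build_env; it can also be instantiated by hand with micromamba:

    micromamba create -y -f config/tn_build.yaml
    micromamba activate tn_build
    bazel --version   # should report 3.1.0 from the environment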
2 changes: 1 addition & 1 deletion core/kernels/data/balance_dataset_ops.cc
@@ -242,7 +242,7 @@ class BalanceDatasetOp::Dataset : public DatasetBase {
return data::model::MakeKnownRatioNode(std::move(args), /*ratio=*/1);
}

-#if defined(TN_COMPATIBLE_INTERFACE_2_2_0)
+#if defined(TN_COMPATIBLE_INTERFACE_2_2)
Status SaveInternal(IteratorStateWriter* writer) override {
mutex_lock l(mu_);
if (!input_impl_) {
2 changes: 1 addition & 1 deletion core/kernels/data/balance_dataset_ops_dummy.cc
@@ -119,7 +119,7 @@ class BalanceDatasetOp::Dataset : public DatasetBase {
return data::model::MakeKnownRatioNode(std::move(args), /*ratio=*/1);
}

-#if defined(TN_COMPATIBLE_INTERFACE_2_2_0)
+#if defined(TN_COMPATIBLE_INTERFACE_2_2)
Status SaveInternal(IteratorStateWriter* writer) override {
mutex_lock l(mu_);
if (!input_impl_) {
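Both call sites move from the old TN_COMPATIBLE_INTERFACE_2_2_0 macro to the new TN_COMPATIBLE_INTERFACE_2_2 defined in core/public/version.h. A quick way to confirm no stale references remain after such a rename:

    grep -rn "TN_COMPATIBLE_INTERFACE_2_2_0" core/   # expect no matches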
7 changes: 4 additions & 3 deletions core/public/version.h
@@ -17,9 +17,10 @@

#include "tensorflow/core/public/version.h"

-// tensornet support tensorflow-2.3.0, the following macro is compatible with tensorflow-2.2.0
-#if (TF_MAJOR_VERSION == 2) && (TF_MINOR_VERSION == 2) && (TF_PATCH_VERSION == 0)
-#define TN_COMPATIBLE_INTERFACE_2_2_0 1
+// tensornet supports tensorflow-2.3; the following macro keeps compatibility with tensorflow-2.2
+// https://github.com/tensorflow/tensorflow/releases/tag/v2.3.0
+#if (TF_MAJOR_VERSION == 2) && (TF_MINOR_VERSION == 2)
+#define TN_COMPATIBLE_INTERFACE_2_2 1
#endif

#endif // TENSORNET_CORE_PUBLIC_VERSION_H_
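Because the compatibility macro keys off TF_MAJOR_VERSION and TF_MINOR_VERSION, the TensorFlow actually present in the build environment decides which code path is compiled. It can be checked from the activated tn_build environment with:

    python -c "import tensorflow as tf; print(tf.__version__)"   # expected 2.2.0 per config/tn_build.yaml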
159 changes: 159 additions & 0 deletions manager
@@ -0,0 +1,159 @@
#!/usr/bin/env bash

[[ ${DEBUG-} != true ]] || set -x

readonly WORKSPACE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
readonly TN_BUILD_ENV_NAME=tn_build

export MAMBA_EXE=${HOME}/.local/bin/micromamba
export MAMBA_ROOT_PREFIX=${HOME}/micromamba

: ${NEED_PREPARE_ENV:=false}
: ${NEED_ACTIVATE_ENV:=true}

die() {
local err=$? err_fmt=
(( err )) && err_fmt=" (err=$err)" || err=1
printf >&2 "[ERROR]$err_fmt %s\n" "$*"
exit $err
}

_prepare_mamba_env(){
if ! type micromamba >/dev/null 2>&1;then
HTTPS_PROXY=${PROXY_URL:=${HTTPS_PROXY}} "${SHELL}" <(curl -L micro.mamba.pm/install.sh)
fi
_mamba_source
[[ -z ${NEXUS3_HEADER} ]] || {
${MAMBA_EXE} config set --file "${MAMBA_ROOT_PREFIX}/.mambarc" channel_alias ${NEXUS3_HEADER}/conda
}
micromamba create -y -f ${WORKSPACE_DIR}/config/${TN_BUILD_ENV_NAME}.yaml
micromamba activate ${TN_BUILD_ENV_NAME}
}

_mamba_source() {
[[ -e ${MAMBA_EXE} ]] || { echo "micromamba executable not found; run './manager prepare_build_env' to create the build env"; exit 1; }
__mamba_setup="$("$MAMBA_EXE" shell hook --shell bash --root-prefix "$MAMBA_ROOT_PREFIX" 2> /dev/null)"
if [ $? -eq 0 ]; then
eval "$__mamba_setup"
else
alias micromamba="$MAMBA_EXE" # Fallback on help from mamba activate
fi
unset __mamba_setup
}

_activate_env() {
_mamba_source
micromamba activate ${TN_BUILD_ENV_NAME}
}

_prepare_compile_env() {
CUR_ENV_PATH=$(ompi_info --parsable --path prefix 2>/dev/null | awk -F":" '{print $NF}')
export C_INCLUDE_PATH=${CUR_ENV_PATH}/include
export CPLUS_INCLUDE_PATH=${CUR_ENV_PATH}/include
}

_build_config(){
CUR_ENV_PATH=$(ompi_info --parsable --path prefix 2>/dev/null | awk -F":" '{print $NF}')
cd ${WORKSPACE_DIR}; bash configure.sh --openmpi_path ${CUR_ENV_PATH}
_prepare_compile_env
}

start_build(){
[[ ${NEED_PREPARE_ENV} == true ]] && _prepare_mamba_env
[[ ${NEED_ACTIVATE_ENV} == true ]] && _activate_env
_build_config
extra_opts=("$@")
[[ ${DEBUG-} != true ]] || extra_opts+=(--sandbox_debug)
bazel build "${extra_opts[@]}" -c opt //core:_pywrap_tn.so
}

only_build(){
[[ ${NEED_ACTIVATE_ENV} == true ]] && _activate_env
_prepare_compile_env
extra_opts=("$@")
[[ ${DEBUG-} != true ]] || extra_opts+=(--sandbox_debug)
bazel build "${extra_opts[@]}" -c opt //core:_pywrap_tn.so
}


start_copy_libs(){
rm -f tensornet/core/_pywrap_tn.so || true
cp bazel-bin/core/_pywrap_tn.so tensornet/core/_pywrap_tn.so
}

start_test(){
python -c "import tensorflow as tf;import tensornet as tn;tn.core.init()"
}


start_only_upload(){
[[ ${NEED_ACTIVATE_ENV} == true ]] && _activate_env
export TWINE_USERNAME=${TWINE_USERNAME:=${NEXUS3_USERNAME}}
export TWINE_PASSWORD=${TWINE_PASSWORD:=${NEXUS3_PASSWORD}}
if [[ -z "$TWINE_USERNAME" || -z "$TWINE_PASSWORD" ]];then
echo "need username/password auth, no env "
echo "export NEXUS3_USERNAME=xxxx"
echo "export NEXUS3_PASSWORD=xxxx"
exit 0
fi
[[ -z ${NEXUS3_PYPI_HOST} ]] && { echo "need pypi host address, export NEXUS3_PYPI_HOST=xxx"; exit 0; }
twine upload --verbose --repository-url ${NEXUS3_PYPI_HOST} dist/*
}

start_create_dist(){
[[ ${NEED_PREPARE_ENV} == true ]] && _prepare_mamba_env
[[ ${NEED_ACTIVATE_ENV} == true ]] && _activate_env
rm -rf dist/* || true
start_copy_libs
[[ $# -gt 0 ]] && export TN_VERSION=$1
PY_VERSION=$(python -c "import sys; print('cp' + ''.join(map(str, sys.version_info[:2])))")
python setup.py bdist_wheel --plat-name manylinux2010_x86_64 --python-tag ${PY_VERSION}
}

start_upload(){
start_create_dist
start_only_upload
}

case "$1" in
(prepare_build_env)
_prepare_mamba_env
;;
(build)
shift 1
start_build "$@"
;;
(only-build)
shift 1
only_build "$@"
;;
(deploy)
shift 1
start_upload "$@"
;;
(copy-libs)
start_copy_libs
;;
(create_dist)
shift 1
start_create_dist "$@"
;;
(help)
cmd=$(basename -- "$0")
cat <<-END
Usage:
$cmd help                  - Print this help.
$cmd prepare_build_env     - Install the micromamba build environment.
$cmd build [args..]        - Build the tensornet shared library.
$cmd only-build [args..]   - Build the tensornet shared library without configuring MPI.
$cmd copy-libs             - Copy the built _pywrap_tn.so into the python package.
$cmd create_dist [version] - Create the setup dist without uploading.
$cmd deploy [version]      - Create the setup dist and upload it to PyPI.
END
;;
(*) die "Unknown command: $1" ;;
esac
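Typical local usage of the script, based on the commands and environment toggles it defines (the version argument is optional and illustrative):

    ./manager prepare_build_env    # one-time: install micromamba and create the tn_build env
    ./manager build                # configure the MPI paths and build //core:_pywrap_tn.so
    ./manager create_dist 0.1.3    # copy the .so into the package and build a wheel into dist/
    DEBUG=true ./manager build     # adds set -x and passes --sandbox_debug to bazel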
27 changes: 27 additions & 0 deletions setup.py
@@ -0,0 +1,27 @@
import os
from setuptools import setup, find_packages

version = os.environ.get('TN_VERSION', '0.1.2')

setup(
    name='qihoo-tensornet',
    version=version,
    description='tensornet',
    author='jiangxinglei',
    author_email='[email protected]',
    url='https://github.com/Qihoo360/tensornet',
    packages=find_packages(),
    package_data={
        "tensornet.core": ["_pywrap_tn.so"],
    },
    install_requires=[
        'tensorflow>=2.2,<2.3'
    ],
    python_requires='>=3.7, <3.8',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3.7'
    ],
)
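The wheel uploaded by CI is produced from this setup.py via ./manager create_dist; the roughly equivalent direct invocation for Python 3.7 would be:

    TN_VERSION=0.1.2 python setup.py bdist_wheel \
        --plat-name manylinux2010_x86_64 --python-tag cp37
    pip install dist/qihoo_tensornet-*.whl   # local smoke test of the resulting wheel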
