Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • mika/sssd
  • guillem/debian-pkg-sssd
  • john.veitch/sssd
  • jgullberg/sssd
  • gioele/sssd
  • oktay454/sssd
  • sergiodj/sssd
  • 3v1n0/sssd
  • jfalk-guest/sssd
  • sathieu/sssd
  • dpward/sssd
  • sssd-team/sssd
  • ahasenack/sssd
  • jbicha/sssd
  • yrro-guest/sssd
15 results
Show changes
Commits on Source (2850)
Showing
with 970 additions and 249 deletions
srpm:
dnf -y install git rpm-build dnf-plugins-core libldb-devel
./contrib/fedora/make_srpm.sh --output $(outdir)
./contrib/fedora/make_srpm.sh --prerelease --output $(outdir)
......@@ -2,8 +2,53 @@ COMPONENT: Subject
Explanation
Resolves:
https://pagure.io/SSSD/sssd/issue/XXXX
Resolves: https://github.com/SSSD/sssd/issues/XXXX
# If a release note is required, choose one of the tags (or multiple tags if
# it makes sense) and place it here. See the description below for tag names
# and information. This is fully optional: not all changes require a release
# note.
#
# :relnote: Generic release note.
# :feature: New feature description.
# :fixes: Notable bug fix description.
# :packaging: Packaging change description.
# :config: Change in configuration (new option, new default, etc.)
# Try to keep the subject line within 52 chars ----|
# Also please try to not exceed 72 characters of length for the body --|
#
# *** Release notes ***
#
# Release notes for new versions are automatically generated from the
# information provided in commit messages.
#
# Ticket Resolution:
# If "Resolves: ticket_url" is found in the commit message then this ticket
# will be automatically closed when this commit is pushed to the upstream
# repository. The ticket will be also mentioned in the release notes as fixed.
#
# Ideally, each commit should resolve at most one ticket. If multiple tickets
# are resolved then repeat the whole line, i.e.:
# Resolves: Ticket #1
# Resolves: Ticket #2
#
# Release Notes Content
# You can also provide short description of the fix or new feature for
# the release notes using one of the release notes tag. The tag is associated
# with a human readable description which is automatically put into the
# release notes into the correct group that is determined by the tag name.
#
# The description is read until an empty line is found. And it can contain
# markdown language for enhanced formatting.
#
# Example:
# SUBJECT
#
# Commit description.
#
# Resolves: https://github.com/SSSD/sssd/issues/XXXX
#
# :fixes: This is an important bug that has been fixed. Keep the
# description short but it can also span multiple lines.
#
# Composite action that packs the current checkout into a tarball,
# instantiates contrib/sssd.spec.in and builds a source rpm from it.
name: Build SSSD's source rpm
inputs:
  version:
    description: Package version.
    required: true
  release:
    description: Package release.
    required: false
    default: '${{ github.run_number }}'
  working-directory:
    description: Working directory.
    required: false
    default: '.'
outputs:
  file:
    description: Source rpm file name.
    value: ${{ steps.srpm.outputs.file }}
  path:
    description: Path to the source rpm.
    value: ${{ steps.srpm.outputs.path }}
runs:
  using: 'composite'
  steps:
    # '-' is an illegal character for RPM version tag
    - name: Sanitize version
      id: sanitize
      shell: bash
      run: |
        version="$(echo '${{ inputs.version }}' | sed 's/-/_/g')"
        echo "version=$version" >> $GITHUB_OUTPUT
    - name: Generate tarball and spec file
      shell: bash
      run: |
        pushd '${{ inputs.working-directory }}'
        release="${{ inputs.release }}"
        name="sssd-${{ steps.sanitize.outputs.version }}"
        tar -cvzf "$name.tar.gz" --transform "s,^,$name/," *
        cp contrib/sssd.spec.in ./sssd.spec
        # Plain "sed -i" is used on purpose: the patterns below need no
        # extended regex, and GNU sed parses "-iE" as "-i" with backup
        # suffix "E", which would leave stray "sssd.specE" files behind.
        sed -i "s/@PACKAGE_NAME@/sssd/g" ./sssd.spec
        sed -i "s/@PACKAGE_VERSION@/${{ steps.sanitize.outputs.version }}/g" ./sssd.spec
        sed -i "s/@PRERELEASE_VERSION@/$release/g" ./sssd.spec
        popd
    - name: Build source rpm
      id: srpm
      uses: next-actions/build-srpm@master
      with:
        tarball: ${{ inputs.working-directory }}/sssd-${{ steps.sanitize.outputs.version }}.tar.gz
        specfile: ${{ inputs.working-directory }}/sssd.spec
# Composite action: run SSSD's autotools configuration.
# NOTE(review): the original indentation appears to have been stripped
# from this snippet by the page export; the layout below is not valid
# YAML as-is — restore indentation from the repository copy.
name: 'Configure SSSD'
description: 'Configure SSSD'
runs:
using: "composite"
steps:
- shell: bash
# "reconfig" and CONFIGURE_ARG_LIST are defined by the sourced
# contrib/fedora/bashrc_sssd and contrib/ci/configure.sh scripts.
run: |
source contrib/fedora/bashrc_sssd
pushd contrib/ci/
. configure.sh
popd
reconfig "${CONFIGURE_ARG_LIST[@]}"
# Composite action: install SSSD build dependencies via the CI helper.
name: 'Install dependencies'
description: 'Install dependencies to build sssd'
runs:
using: "composite"
steps:
- shell: bash
# Per the flag name, --deps-only makes contrib/ci/run install the build
# dependencies and exit without running the build itself.
run: |
sudo ./contrib/ci/run --deps-only
# Static analysis run in the context of the TARGET repository.
#
# NOTE(review): this workflow uses pull_request_target and then checks
# out the untrusted PR head SHA. The mitigations visible here are the
# read-only permissions and persist-credentials: false on the checkout —
# keep both if this file is modified.
name: "Analyze (target)"
on:
pull_request_target:
branches: [master, sssd-2-7, sssd-2-8, sssd-2-9]
# Only one analysis run per pull request; a new push cancels the old run.
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
jobs:
# cppcheck analysis of the PR head; results are reported through the
# linuxdeepin/action-cppcheck action (approval and comments disabled).
cppcheck:
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
# Untrusted PR code — checked out without persisted credentials.
ref: ${{ github.event.pull_request.head.sha }}
persist-credentials: false
- name: Perform cppcheck analysis
# v0.0.11 is the latest release but we need a later commit
uses: linuxdeepin/action-cppcheck@e63fb1d3f321e0467737aa9de7f691360fb1b8fb
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
repository: ${{ github.repository }}
pull_request_id: ${{ github.event.pull_request.number }}
allow_approve: false
enable_checks: "warning,unusedFunction,missingInclude"
comment_result: false
# Differential Coverity scan on a self-hosted "covscan" runner: build a
# source rpm from both the target branch and the PR and compare results.
covscan:
runs-on: covscan
permissions:
contents: read
# 24 hours — the scan is slow.
timeout-minutes: 1440
steps:
- name: Checkout target branch
uses: actions/checkout@v4
with:
ref: ${{ github.base_ref }}
path: target
- name: Checkout pull request branch
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
path: pr
# The srpm action is taken from the TARGET checkout for both builds, so
# a PR cannot alter the action that runs in this privileged context.
- name: Build source rpm - ${{ github.base_ref }}
id: target
uses: ./target/.github/actions/build-sssd-srpm
with:
working-directory: target
version: ${{ github.base_ref }}
- name: Build source rpm - pr${{ github.event.pull_request.number }}
id: pr
uses: ./target/.github/actions/build-sssd-srpm
with:
working-directory: pr
version: pr${{ github.event.pull_request.number }}
- name: Run covscan
run: |
run-covscan --base-srpm "${{ steps.target.outputs.path }}" --srpm "${{ steps.pr.outputs.path }}" --output-dir logs
- name: Print result
uses: next-actions/print-logs@master
if: always()
with:
working-directory: logs
files: |
added.err
*.err
- name: Upload artifacts
if: always()
uses: actions/upload-artifact@v3
with:
if-no-files-found: ignore
name: covscan
path: |
./logs/*.err
# Aggregate gate so branch protection can require a single status check.
result:
name: All tests are successful
if: ${{ always() }}
runs-on: ubuntu-latest
needs: [cppcheck, covscan]
steps:
- name: Fail on failure
if: ${{ needs.cppcheck.result != 'success' || needs.covscan.result != 'success' }}
run: exit 1
name: "ci"
on:
push:
branches: [master, sssd-2-7, sssd-2-8, sssd-2-9]
pull_request:
branches: [master, sssd-2-7, sssd-2-8, sssd-2-9]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build:
runs-on: ubuntu-latest
container: quay.io/sssd/ci-client-devel:latest
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install dependencies
id: dependencies
uses: ./.github/actions/install-dependencies
- name: Configure sssd
uses: ./.github/actions/configure
- name: make
shell: bash
working-directory: x86_64
run: |
source ../contrib/fedora/bashrc_sssd
make CFLAGS+="$SSS_WARNINGS -Werror -Wno-error=deprecated-declarations"
- name: make check
shell: bash
working-directory: x86_64
run: |
source ../contrib/fedora/bashrc_sssd
make CFLAGS+="$SSS_WARNINGS -Werror -Wno-error=deprecated-declarations" check
- name: make distcheck
shell: bash
working-directory: x86_64
run: |
source ../contrib/fedora/bashrc_sssd
make distcheck
- uses: actions/upload-artifact@v3
if: always()
with:
name: build
path: |
x86_64/config.log
x86_64/config.h
x86_64/test-suite.log
if-no-files-found: ignore
prepare:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
matrix: ${{ steps.matrix.outputs.matrix }}
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Get matrix
id: matrix
run: ./contrib/ci/get-matrix.py --action
intgcheck:
needs: [prepare, build]
strategy:
fail-fast: false
matrix:
tag: ${{ fromJson(needs.prepare.outputs.matrix).intgcheck }}
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
path: sssd
- name: Setup containers
uses: SSSD/sssd-ci-containers/actions/setup@master
with:
path: sssd-ci-containers
tag: ${{ matrix.tag }}
limit: dns client
override: |
services:
client:
image: ${REGISTRY}/ci-client-devel:${TAG}
volumes:
- ../sssd:/sssd:rw
- name: Run integration tests
uses: SSSD/sssd-ci-containers/actions/exec@master
with:
working-directory: /sssd
script: ./contrib/ci/run --moderate
- name: Print logs
uses: next-actions/print-logs@master
if: always()
with:
working-directory: ./sssd/ci-build-debug
files: |
test-suite.log
ci-make-intgcheck.log
ci-make-distcheck.log
- name: Upload main artifacts
if: always()
uses: actions/upload-artifact@v3
with:
if-no-files-found: ignore
name: ${{ matrix.tag }}-intgcheck
path: |
./sssd/*.log
./sssd/ci-build-debug/ci-*.log
./sssd/ci-build-debug/test-suite.log
./sssd/ci-build-debug/ci-mock-result/*.log
- name: Upload valgrind artifacts
if: always()
uses: actions/upload-artifact@v3
with:
if-no-files-found: ignore
name: ${{ matrix.tag }}-intgcheck-valgrind
path: |
./sssd/ci-build-debug/*.valgrind.log
multihost:
needs: [prepare, build]
strategy:
fail-fast: false
matrix:
tag: ${{ fromJson(needs.prepare.outputs.matrix).multihost }}
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout sssd repository
uses: actions/checkout@v4
with:
path: sssd
- name: Setup containers
uses: SSSD/sssd-ci-containers/actions/setup@master
with:
path: sssd-ci-containers
tag: ${{ matrix.tag }}
limit: dns client
override: |
services:
client:
image: ${REGISTRY}/ci-client-devel:${TAG}
shm_size: 4G
tmpfs:
- /dev/shm
volumes:
- ../sssd:/sssd:rw
- name: Build SSSD on the client
uses: SSSD/sssd-ci-containers/actions/exec@master
with:
log-file: multihost-build.log
working-directory: /sssd
script: |
#!/bin/bash
set -ex
./contrib/ci/run --deps-only
autoreconf -if
mkdir -p /dev/shm/sssd
pushd /dev/shm/sssd
/sssd/configure --enable-silent-rules
make rpms
- name: Install SSSD on the client
uses: SSSD/sssd-ci-containers/actions/exec@master
with:
log-file: multihost-install.log
user: root
script: |
#!/bin/bash
set -ex
dnf remove -y --noautoremove sssd\*
dnf install -y /dev/shm/sssd/rpmbuild/RPMS/*/*.rpm
rm -fr /dev/shm/sssd
- name: Install multihost tests dependencies
shell: bash
run: |
set -ex
sudo apt-get update
# Install certutil and dependencies for python-ldap
sudo apt-get install -y libnss3-tools libsasl2-dev python3-dev libldap2-dev libssl-dev
# Virtualenv
pip3 install virtualenv
python3 -m venv .venv
source .venv/bin/activate
# Install multihost tests requirements
pip3 install -r ./sssd/src/tests/multihost/requirements.txt
- name: Create multihost configuration
uses: DamianReeves/write-file-action@0a7fcbe1960c53fc08fe789fa4850d24885f4d84
with:
path: mhc.yml
write-mode: overwrite
contents: |
root_password: 'Secret123'
domains:
- name: tier0.tests
type: sssd
hosts:
- name: client
external_hostname: client.test
role: master
- name: Run basic multihost tests
run: |
set -ex -o pipefail
source .venv/bin/activate
pytest -s --multihost-config=./mhc.yml ./sssd/src/tests/multihost/basic |& tee multihost-pytest.log
- name: Upload artifacts
if: always()
uses: actions/upload-artifact@v3
with:
if-no-files-found: ignore
name: ${{ matrix.tag }}-multihost
path: |
sssd/ci-install-deps.log
multihost-build.log
multihost-install.log
multihost-pytest.log
system:
needs: [prepare, build]
strategy:
fail-fast: false
matrix:
tag: ${{ fromJson(needs.prepare.outputs.matrix).multihost }}
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout sssd repository
uses: actions/checkout@v4
with:
path: sssd
- name: Setup containers
uses: SSSD/sssd-ci-containers/actions/setup@master
with:
path: sssd-ci-containers
tag: ${{ matrix.tag }}
override: |
services:
client:
image: ${REGISTRY}/ci-client-devel:${TAG}
shm_size: 4G
tmpfs:
- /dev/shm
volumes:
- ../sssd:/sssd:rw
ipa:
image: ${REGISTRY}/ci-ipa-devel:${TAG}
shm_size: 4G
tmpfs:
- /dev/shm
volumes:
- ../sssd:/sssd:rw
- name: Build SSSD on the client and IPA
uses: SSSD/sssd-ci-containers/actions/exec@master
with:
log-file: build.log
working-directory: /sssd
where: |
client
ipa
script: |
#!/bin/bash
set -ex
./contrib/ci/run --deps-only
autoreconf -if
mkdir -p /dev/shm/sssd
pushd /dev/shm/sssd
/sssd/configure --enable-silent-rules
make rpms
# Install the freshly built rpms on both containers and clean up the
# shared-memory build tree.
- name: Install SSSD on the client and IPA
uses: SSSD/sssd-ci-containers/actions/exec@master
with:
log-file: install.log
user: root
where: |
client
ipa
script: |
#!/bin/bash
set -ex
dnf install -y /dev/shm/sssd/rpmbuild/RPMS/*/*.rpm
rm -fr /dev/shm/sssd
# Re-enable sssd-kcm: it was disabled when sssd was removed and is not
# enabled again by installing the rpms.
systemctl enable --now sssd-kcm.socket
- name: Restart SSSD on IPA server
uses: SSSD/sssd-ci-containers/actions/exec@master
with:
user: root
where: ipa
script: |
#!/bin/bash
set -ex
systemctl restart sssd || systemctl status sssd
- name: Install system tests dependencies
shell: bash
working-directory: ./sssd/src/tests/system
run: |
set -ex
sudo apt-get update
# Install dependencies for python-ldap
sudo apt-get install -y libsasl2-dev python3-dev libldap2-dev libssl-dev
# Virtualenv
pip3 install virtualenv
python3 -m venv .venv
source .venv/bin/activate
# Install system tests requirements
pip3 install -r ./requirements.txt
# Install yq to parse yaml files
sudo wget -qO /usr/local/bin/yq https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64
sudo chmod a+x /usr/local/bin/yq
- name: Remove ad from mhc.yaml
shell: bash
working-directory: ./sssd/src/tests/system
run: |
yq -i 'del(.domains[0].hosts.[] | select(.role == "ad"))' mhc.yaml
- name: Check polarion metadata
shell: bash
working-directory: ./sssd/src/tests/system
run: |
# Run pytest in collect only mode to quickly catch issues in Polarion metadata.
set -ex -o pipefail
mkdir -p $GITHUB_WORKSPACE/artifacts
source .venv/bin/activate
pytest \
--color=yes \
--mh-config=./mhc.yaml \
--mh-log-path=$GITHUB_WORKSPACE/mh.log \
--mh-artifacts-dir=$GITHUB_WORKSPACE/artifacts \
--polarion-config=./polarion.yaml \
--output-polarion-testcase=$GITHUB_WORKSPACE/artifacts/testcase.xml \
--collect-only . |& tee $GITHUB_WORKSPACE/pytest-collect.log
- name: Run tests
shell: bash
working-directory: ./sssd/src/tests/system
run: |
set -ex -o pipefail
mkdir -p $GITHUB_WORKSPACE/artifacts
source .venv/bin/activate
pytest \
--color=yes \
--mh-config=./mhc.yaml \
--mh-log-path=$GITHUB_WORKSPACE/mh.log \
--mh-artifacts-dir=$GITHUB_WORKSPACE/artifacts \
--polarion-config=./polarion.yaml \
--output-polarion-testcase=$GITHUB_WORKSPACE/artifacts/testcase.xml \
--output-polarion-testrun=$GITHUB_WORKSPACE/artifacts/testrun.xml \
-vvv . |& tee $GITHUB_WORKSPACE/pytest.log
- name: Upload artifacts
if: always()
uses: actions/upload-artifact@v3
with:
if-no-files-found: ignore
name: ${{ matrix.tag }}-system
path: |
sssd/ci-install-deps.log
artifacts
mh.log
build.log
install.log
pytest.log
pytest-collect.log
result:
name: All tests are successful
if: ${{ always() }}
runs-on: ubuntu-latest
needs: [build, intgcheck, multihost, system]
steps:
- name: Fail on failure
if: |
needs.build.result != 'success'
|| needs.intgcheck.result != 'success'
|| needs.multihost.result != 'success'
|| needs.system.result != 'success'
run: exit 1
# Build project in Fedora copr with multiple chroots.
#
# The project is built for each pull request and it will be available in copr as
# @sssd/pr#number. If the build is successful, it can then be installed with:
# dnf copr enable @sssd/pr#number.
#
# The project is automatically deleted after 60 days or after the pull request
# is closed, whichever happens first. It is rebuilt with each pull request
# update.
#
# The source rpm used to build the project in copr is attached as an artifact to
# this check.
#
# Simplified flow:
# - build srpm (rvn == sssd-pr#number-#runid) and upload it as an artifact
# - obtain list of desired chroots
# - create copr project @sssd/pr#number
# - cancel previous pending builds
# - build project - there is one job (and one commit status) per chroot
name: copr
on:
pull_request_target:
branches: [master, sssd-2-7, sssd-2-8, sssd-2-9]
types: [opened, synchronize, reopened]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
env:
COPR_ACCOUNT: '@sssd'
COPR_PROJECT: pr${{ github.event.pull_request.number }}
PR_ID: ${{ github.event.pull_request.number }}
PR_URL: ${{ github.event.pull_request.html_url }}
jobs:
prepare:
runs-on: ubuntu-latest
outputs:
srpm: ${{ steps.srpm.outputs.file }}
chroots_json: ${{ steps.chroots.outputs.json }}
permissions:
contents: read
steps:
- name: Checkout source
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
- name: Build source rpm
id: srpm
uses: ./.github/actions/build-sssd-srpm
with:
version: 9.${{ env.COPR_PROJECT }}
- name: Upload source rpm as an artifact
uses: actions/upload-artifact@v3
with:
name: ${{ steps.srpm.outputs.file }}
path: ${{ steps.srpm.outputs.path }}
- name: Initialize copr actions
id: copr
uses: next-actions/copr/init@master
with:
token: ${{ secrets.COPR_SECRETS }}
- name: Get copr chroots
id: chroots
uses: next-actions/copr/filter-chroots@master
with:
coprcfg: ${{ steps.copr.outputs.coprcfg }}
filter: "fedora-.+-x86_64|centos-stream-.*-x86_64"
exclude: "fedora-eln-.+"
- name: Create copr project
uses: next-actions/copr/create-project@master
with:
coprcfg: ${{ steps.copr.outputs.coprcfg }}
chroots: ${{ steps.chroots.outputs.list }}
project: ${{ env.COPR_PROJECT }}
account: ${{ env.COPR_ACCOUNT }}
fedora-review: off
description: 'Development package for [sssd pull request #${{ env.PR_ID }}](${{ env.PR_URL }}).'
instructions: 'Use this for test purpose only. Do not use this in production.'
- name: Cancel pending builds
uses: next-actions/copr/cancel-builds@master
with:
coprcfg: ${{ steps.copr.outputs.coprcfg }}
project: ${{ env.COPR_PROJECT }}
account: ${{ env.COPR_ACCOUNT }}
- name: Add buildroot repository to CentOS Stream
env:
coprcfg: ${{ steps.copr.outputs.coprcfg }}
run: |
# CentOS Stream 8
copr-cli --config "$coprcfg" edit-chroot \
--repos 'https://kojihub.stream.centos.org/kojifiles/repos/c8s-build/latest/$basearch' \
--modules idm:DL1 \
$COPR_ACCOUNT/$COPR_PROJECT/centos-stream-8-x86_64
# CentOS Stream 9
copr-cli --config "$coprcfg" edit-chroot \
--repos 'https://kojihub.stream.centos.org/kojifiles/repos/c9s-build/latest/$basearch/' \
$COPR_ACCOUNT/$COPR_PROJECT/centos-stream-9-x86_64
build:
runs-on: ubuntu-latest
needs: [prepare]
strategy:
matrix:
chroot: ${{ fromJson(needs.prepare.outputs.chroots_json) }}
fail-fast: false
steps:
- name: Checkout source
uses: actions/checkout@v4
- name: Download source rpm
uses: actions/download-artifact@v3
with:
name: ${{ needs.prepare.outputs.srpm }}
path: .
- name: Initialize copr actions
id: copr
uses: next-actions/copr/init@master
with:
token: ${{ secrets.COPR_SECRETS }}
- name: Build srpm in copr for ${{ matrix.chroot }}
uses: next-actions/copr/submit-build@master
with:
coprcfg: ${{ steps.copr.outputs.coprcfg }}
srpm: ${{ needs.prepare.outputs.srpm }}
chroots: ${{ matrix.chroot }}
project: ${{ env.COPR_PROJECT }}
account: ${{ env.COPR_ACCOUNT }}
result:
name: All copr builds are successful
if: ${{ always() }}
runs-on: ubuntu-latest
needs: [build]
steps:
- name: Fail on failure
if: ${{ needs.build.result != 'success' }}
run: exit 1
# Delete the per-pull-request copr project (@sssd/pr#number) once the
# pull request is closed; counterpart of the "copr" build workflow.
name: copr cleanup
on:
pull_request_target:
branches: [master, sssd-2-7, sssd-2-8, sssd-2-9]
types: [closed]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: false
jobs:
delete:
runs-on: ubuntu-latest
# Redundant with types: [closed] above, but kept as a safety belt.
if: ${{ github.event.action == 'closed' }}
steps:
- name: Initialize copr actions
id: copr
uses: next-actions/copr/init@master
with:
token: ${{ secrets.COPR_SECRETS }}
- name: Delete copr project
uses: next-actions/copr/delete-project@master
with:
coprcfg: ${{ steps.copr.outputs.coprcfg }}
project: 'pr${{ github.event.pull_request.number }}'
account: '@sssd'
name: Coverity scan
on:
# run once daily at 00:30 UTC due to
# https://scan.coverity.com/faq#frequency
schedule:
- cron: "30 0 * * *"
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
coverity:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install dependencies
uses: ./.github/actions/install-dependencies
- name: Configure
uses: ./.github/actions/configure
- name: Execute and submit coverity scan
uses: vapier/coverity-scan-action@v1.7.0
with:
email: "sssd-maint@redhat.com"
token: ${{ secrets.COVERITY_SCAN_TOKEN }}
working-directory: x86_64
name: "Static code analysis"
on:
push:
branches: [master, sssd-2-7, sssd-2-8, sssd-2-9]
pull_request:
branches: [master, sssd-2-7, sssd-2-8, sssd-2-9]
schedule:
# Everyday at midnight
- cron: '0 0 * * *'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
codeql:
runs-on: ubuntu-latest
permissions:
security-events: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install dependencies
id: dependencies
uses: ./.github/actions/install-dependencies
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: cpp, python
queries: +security-and-quality
- name: Configure sssd
uses: ./.github/actions/configure
- name: Build sssd
working-directory: x86_64
run: |
PROCESSORS=$(/usr/bin/getconf _NPROCESSORS_ONLN)
make -j$PROCESSORS
- name: Upload configuration artifacts
uses: actions/upload-artifact@v3
if: always()
with:
name: codeql-build
path: |
x86_64/config.log
x86_64/config.h
if-no-files-found: ignore
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
python-system-tests:
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup virtual environment
working-directory: ./src/tests/system
run: |
sudo apt-get update
# Install dependencies for python-ldap
sudo apt-get install -y libsasl2-dev python3-dev libldap2-dev libssl-dev
pip3 install virtualenv
python3 -m venv .venv
source .venv/bin/activate
pip3 install -r ./requirements.txt
pip3 install flake8 pycodestyle isort mypy black
- name: flake8
if: always()
working-directory: ./src/tests/system
run: source .venv/bin/activate && flake8 .
- name: pycodestyle
if: always()
working-directory: ./src/tests/system
run: source .venv/bin/activate && pycodestyle .
- name: isort
if: always()
working-directory: ./src/tests/system
run: source .venv/bin/activate && isort --check-only .
- name: black
if: always()
working-directory: ./src/tests/system
run: source .venv/bin/activate && black --check --diff .
- name: mypy
if: always()
working-directory: ./src/tests/system
run: source .venv/bin/activate && mypy --install-types --non-interactive tests
result:
name: All tests are successful
if: ${{ always() }}
runs-on: ubuntu-latest
needs: [codeql]
steps:
- name: Fail on failure
if: ${{ needs.codeql.result != 'success' }}
run: exit 1
......@@ -112,3 +112,20 @@ sss_ssh_knownhostsproxy
sssd_ssh
test-authtok
/ci-build-*
.pytest_cache
__pycache__
.venv
src/man/sssd_user_name.include
# multihost tests
!src/tests/multihost/sssd
!src/tests/multihost/docs/Makefile
!src/tests/multihost/setup.py
!src/tests/multihost/sssd-testlib.spec
# system tests
src/tests/system/artifacts
src/tests/system/docs/_build
src/tests/system/docs/api
!src/tests/system/docs/Makefile
!src/tests/system/lib/sssd
# Legacy Travis CI configuration: builds the SSSD docker images and runs
# the Coverity scan inside them (see .travis/travis-docker-build.sh and
# .travis/travis-tasks.sh).
language: c
compiler: gcc
sudo: required
# trusty = Ubuntu 14.04 build environment.
dist: trusty
services:
- docker
addons:
apt:
packages:
- bash
- tar
- bzip2
env:
global:
# The next declaration is the encrypted COVERITY_SCAN_TOKEN, created
# via the "travis encrypt" command using the project repo's public key
- secure: "gD4XB/tAquGTUFGvQ4+a+K9EbemQtyZs0Py+r7+HAEQ7h/B+fwwRX1h5bGzMUjyCUJ88u28wdRZ0TNxIiEVXuSi/0Ia9BOvdS9YurXdpZc7ha1OpYnJd1tYwxGrgozKW9qXB3R6XZmlcxVGzIHF3fwK9a1p+rNDUihWhasqeAPFFI3IhQhwDIIxO3paRGvHHO0UNlw0+lpgsiQLYIYFWYjHqq2voZ1UlV4Ga7LSP1Yh8F38hDSMk7ykSLedsV1kqxh3zky8p5fLSbDRI1y7PLNBYD63LagUCEk1o3nF+hF0l3nRfEApFJKUhBfccgNc2mdXbBdDxDCnwiArbTXQNxI2Iml85UJ/I5/CS3uE437A3H7ZdvL51w2592JGNMEwq9pxGK3vxcN8g/Yn2Xoo1F2KTVHBexT44LEnS0ADRj5K8AfDsyIUz/rB9+N05k5WXtqcDWblpC5gfD0nk3WQnpmc8hjeI2B9RTFTa3ydA4I5wfABkGfNARH39RxK10d+b176U8x3z05p/PgyraAYKi2kFpA3ha5fw9o1CIqcd5OpUcIWrIo5+FG8hYgtcIG+65PSOHz6gGVZkpZyR4vqIuHIfw4jdi68d6LfoophdhjuFSDTuwgXXGQNjdaYQSpeoZ5Gm9hvHbasabqIBpOfDo/Yjq6up20byvmDaGtoeojI="
before_install:
- ./.travis/travis-docker-build.sh
script:
- docker run -e COVERITY_SCAN_TOKEN=$COVERITY_SCAN_TOKEN --rm sssd/sssd
#!/bin/bash
# Build the SSSD CI docker images from the current checkout.
#
# The tracked files plus the .git directory are archived into a tarball
# which "docker build" consumes via the TARBALL build argument, so the
# image is built from exactly this working tree.
set -e

# Create an archive of the current checkout.
# NOTE(review): with a very large file list, xargs may invoke tar more
# than once and "tar cfj" would overwrite the earlier archive each time;
# confirm the file count stays within a single xargs invocation.
TARBALL=$(mktemp -p . tarball-XXXXXX.tar.bz2)
git ls-files | xargs tar cfj "$TARBALL" .git

sudo docker build -f Dockerfile.deps -t sssd/sssd-deps .
sudo docker build -t sssd/sssd --build-arg TARBALL="$TARBALL" .

rm -f "$TARBALL"
exit 0
#!/bin/bash
# Run the Coverity scan inside the CI container: work around missing
# _Float* type definitions, then hand the build to Coverity's Travis
# helper script with SSSD-specific settings.
# Exit on failures
set -e
pushd /builddir/
# We have to define the _Float* types as those are not defined by coverity and
# as a result the code linking against those (pretty much anything linking
# against stdlib.h and math.h) won't be covered.
echo "#define _Float128 long double" > /tmp/coverity.h
echo "#define _Float64x long double" >> /tmp/coverity.h
echo "#define _Float64 double" >> /tmp/coverity.h
echo "#define _Float32x double" >> /tmp/coverity.h
echo "#define _Float32 float" >> /tmp/coverity.h
# The coverity scan script returns an error despite succeeding...
# (hence the trailing "||:" which ignores its exit status).
# Each variable keeps any value already set in the environment and only
# falls back to the default given here.
CFLAGS="${CFLAGS:- -include /tmp/coverity.h}" \
TRAVIS_BRANCH="${TRAVIS_BRANCH:-master}" \
COVERITY_SCAN_PROJECT_NAME="${COVERITY_SCAN_PROJECT_NAME:-SSSD/sssd}" \
COVERITY_SCAN_NOTIFICATION_EMAIL="${COVERITY_SCAN_NOTIFICATION_EMAIL:-sssd-maint@redhat.com}" \
COVERITY_SCAN_BUILD_COMMAND_PREPEND="${COVERITY_SCAN_BUILD_COMMAND_PREPEND:-source contrib/fedora/bashrc_sssd && reconfig}" \
COVERITY_SCAN_BUILD_COMMAND="${COVERITY_SCAN_BUILD_COMMAND:-make all check TESTS= }" \
COVERITY_SCAN_BRANCH_PATTERN=${COVERITY_SCAN_BRANCH_PATTERN:-master} \
/usr/bin/travisci_build_coverity_scan.sh ||:
popd #builddir
# Transifex client configuration for SSSD translations.
[main]
host = https://www.transifex.com
# Main message catalog: po/sssd.pot -> po/<lang>.po
[sssd.master-po-sssd-pot]
file_filter = po/<lang>.po
source_file = po/sssd.pot
source_lang = en
# Manual-pages catalog: src/man/po/sssd-docs.pot -> src/man/po/<lang>.po
[sssd.sssd-docspot_1]
file_filter = src/man/po/<lang>.po
source_file = src/man/po/sssd-docs.pot
source_lang = en
The instructions on how to build the SSSD and contribute to the
project can be found here:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
https://docs.pagure.org/SSSD.sssd/developers/index.html
https://sssd.io/contrib/introduction.html
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Image used by Travis CI to run the Coverity scan tasks on SSSD.
FROM sssd/sssd-deps

# MAINTAINER is deprecated since Docker 1.13; LABEL is the supported form.
LABEL maintainer="SSSD Maintainers <sssd-maint@redhat.com>"

# Tarball of the source tree, produced by .travis/travis-docker-build.sh.
ARG TARBALL

# Trust the scan.coverity.com TLS certificate and fetch Coverity's
# Travis helper script.
# NOTE(review): the tee destination "/etc/ssl/certs/ca-" looks truncated
# (likely "ca-certificates.crt" or similar) — confirm against the
# original repository file.
RUN echo -n | openssl s_client -connect scan.coverity.com:443 | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p' | sudo tee -a /etc/ssl/certs/ca- && curl -s https://scan.coverity.com/scripts/travisci_build_coverity_scan.sh -o /usr/bin/travisci_build_coverity_scan.sh && chmod a+x /usr/bin/travisci_build_coverity_scan.sh

ADD $TARBALL /builddir/

# Run the CI task script by default.
ENTRYPOINT /builddir/.travis/travis-tasks.sh
# Dependency image: Fedora with everything needed to build SSSD.
FROM fedora:latest

# MAINTAINER is deprecated since Docker 1.13; LABEL is the supported form.
LABEL maintainer="SSSD Maintainers <sssd-maint@redhat.com>"

# NOTE(review): TARBALL is declared but not referenced in this file —
# presumably consumed by a derived image; confirm before removing.
ARG TARBALL

# Install build tooling, then use a shallow clone of upstream master to
# resolve and install SSSD's rpm build dependencies. ("wget" appeared
# twice in the original package list; deduplicated here — dnf ignores
# the duplicate either way.)
RUN dnf -y install git openssl sudo curl wget ruby rubygems "rubygem(json)" rpm-build dnf-plugins-core libldb-devel && \
    git clone --depth=50 --branch=master https://github.com/SSSD/sssd.git /tmp/sssd && \
    cd /tmp/sssd && \
    ./contrib/fedora/make_srpm.sh && \
    dnf builddep -y rpmbuild/SRPMS/sssd-*.src.rpm && \
    dnf -y clean all
// Set when the 'Read trusted files' gate fails, i.e. the change modified
// scripts that this pipeline itself executes; used in the post section
// to report ERROR instead of FAILURE.
def untrusted = false
pipeline {
agent none
options {
// Abort the whole run after 10 hours.
timeout(time: 10, unit: 'HOURS')
checkoutToSubdirectory('sssd')
}
// Shared settings for the per-distro test stages; each stage overrides
// GH_CONTEXT/GH_URL/TEST_SYSTEM/CONFIG in its own environment block.
environment {
NAME = "$BRANCH_NAME/$BUILD_ID"
BASE_DIR = "/home/fedora"
GH_CONTEXT = "sssd-ci"
GH_SUCCESS = "Success."
GH_PENDING = "Build is pending."
GH_FAILURE = "Build failed."
GH_URL = "https://pagure.io/SSSD/sssd"
// Test artifacts are archived to S3 and linked from the GitHub status.
AWS_BASE = "https://s3.eu-central-1.amazonaws.com/sssd-ci"
SUITE_DIR = "$BASE_DIR/sssd-test-suite"
ARCHIVE = "$BASE_DIR/scripts/archive.sh"
RUN = "./sssd/contrib/test-suite/run.sh"
}
stages {
stage('Prepare') {
steps {
githubNotify status: 'PENDING', context: "$GH_CONTEXT", description: 'Running tests.', targetUrl: "$GH_URL"
}
}
stage('Read trusted files') {
steps {
readTrusted './contrib/test-suite/run.sh'
readTrusted './contrib/test-suite/run-client.sh'
}
post {
failure {
script {
untrusted = true
}
}
}
}
stage('Run Tests') {
parallel {
stage('Test on Fedora 28') {
agent {label "sssd-ci"}
environment {
TEST_SYSTEM = "fedora28"
GH_CONTEXT = "$GH_CONTEXT/fedora28"
GH_URL = "$AWS_BASE/$BRANCH_NAME/$BUILD_ID/$TEST_SYSTEM/index.html"
CONFIG = "$BASE_DIR/configs/${TEST_SYSTEM}.json"
}
steps {
githubNotify status: 'PENDING', context: "$GH_CONTEXT", description: "$GH_PENDING", targetUrl: "$GH_URL"
sh '$RUN "$WORKSPACE/sssd" "$SUITE_DIR" "$WORKSPACE/artifacts/$TEST_SYSTEM" "$CONFIG"'
}
post {
always {
archiveArtifacts artifacts: "artifacts/**", allowEmptyArchive: true
sh '$ARCHIVE $TEST_SYSTEM $WORKSPACE/artifacts/$TEST_SYSTEM $NAME'
sh 'rm -fr "$WORKSPACE/artifacts/$TEST_SYSTEM"'
}
failure {
githubNotify status: 'FAILURE', context: "$GH_CONTEXT", description: "$GH_FAILURE", targetUrl: "$GH_URL"
}
success {
githubNotify status: 'SUCCESS', context: "$GH_CONTEXT", description: "$GH_SUCCESS", targetUrl: "$GH_URL"
}
}
}
stage('Test on Fedora 29') {
agent {label "sssd-ci"}
environment {
TEST_SYSTEM = "fedora29"
GH_CONTEXT = "$GH_CONTEXT/fedora29"
GH_URL = "$AWS_BASE/$BRANCH_NAME/$BUILD_ID/$TEST_SYSTEM/index.html"
CONFIG = "$BASE_DIR/configs/${TEST_SYSTEM}.json"
}
steps {
githubNotify status: 'PENDING', context: "$GH_CONTEXT", description: "$GH_PENDING", targetUrl: "$GH_URL"
sh '$RUN "$WORKSPACE/sssd" "$SUITE_DIR" "$WORKSPACE/artifacts/$TEST_SYSTEM" "$CONFIG"'
}
post {
always {
archiveArtifacts artifacts: "artifacts/**", allowEmptyArchive: true
sh '$ARCHIVE $TEST_SYSTEM $WORKSPACE/artifacts/$TEST_SYSTEM $NAME'
sh 'rm -fr "$WORKSPACE/artifacts/$TEST_SYSTEM"'
}
failure {
githubNotify status: 'FAILURE', context: "$GH_CONTEXT", description: "$GH_FAILURE", targetUrl: "$GH_URL"
}
success {
githubNotify status: 'SUCCESS', context: "$GH_CONTEXT", description: "$GH_SUCCESS", targetUrl: "$GH_URL"
}
}
}
stage('Test on Fedora Rawhide') {
agent {label "sssd-ci"}
environment {
TEST_SYSTEM = "fedora-rawhide"
GH_CONTEXT = "$GH_CONTEXT/fedora-rawhide"
GH_URL = "$AWS_BASE/$BRANCH_NAME/$BUILD_ID/$TEST_SYSTEM/index.html"
CONFIG = "$BASE_DIR/configs/${TEST_SYSTEM}.json"
}
steps {
githubNotify status: 'PENDING', context: "$GH_CONTEXT", description: "$GH_PENDING", targetUrl: "$GH_URL"
sh '$RUN "$WORKSPACE/sssd" "$SUITE_DIR" "$WORKSPACE/artifacts/$TEST_SYSTEM" "$CONFIG"'
}
post {
always {
archiveArtifacts artifacts: "artifacts/**", allowEmptyArchive: true
sh '$ARCHIVE $TEST_SYSTEM $WORKSPACE/artifacts/$TEST_SYSTEM $NAME'
sh 'rm -fr "$WORKSPACE/artifacts/$TEST_SYSTEM"'
}
failure {
githubNotify status: 'FAILURE', context: "$GH_CONTEXT", description: "$GH_FAILURE", targetUrl: "$GH_URL"
}
success {
githubNotify status: 'SUCCESS', context: "$GH_CONTEXT", description: "$GH_SUCCESS", targetUrl: "$GH_URL"
}
}
}
}
}
}
post {
failure {
script {
if (untrusted) {
githubNotify status: 'ERROR', context: "$GH_CONTEXT", description: 'Untrusted files were modified.', targetUrl: "$GH_URL"
} else {
githubNotify status: 'FAILURE', context: "$GH_CONTEXT", description: 'Some tests failed.', targetUrl: "$GH_URL"
}
}
}
aborted {
githubNotify status: 'ERROR', context: "$GH_CONTEXT", description: 'Aborted.', targetUrl: "$GH_URL"
}
success {
githubNotify status: 'SUCCESS', context: "$GH_CONTEXT", description: 'All tests succeeded', targetUrl: "$GH_URL"
}
}
}