Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (96)
Showing with 3998 additions and 4372 deletions
name: "Analyze (target)"
on:
pull_request_target:
branches: [master]
branches: [master, sssd-2-7]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
@@ -81,3 +81,21 @@ jobs:
name: covscan
path: |
./logs/*.err
flake8:
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.sha }}
persist-credentials: false
- name: Run flake8
uses: grantmcconnaughey/lintly-flake8-github-action@d9db4fd0be9fb1cd19206a48ec0773bd93b82cbd
with:
token: ${{ secrets.GITHUB_TOKEN }}
failIf: new
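The new flake8 job is gated with failIf: new, which makes the run fail only on flake8 violations introduced by the pull request rather than on pre-existing ones. As a rough Python sketch of that idea (not lintly's actual implementation; the report file names are hypothetical), one could capture flake8 output for the base and head revisions and report only the violations unique to the head:

#!/usr/bin/env python3
"""Report flake8 violations present in the PR head but not in the base.

Illustration only: inputs are plain `flake8` reports captured per revision,
e.g. `flake8 src > flake8-base.txt` on the target branch and
`flake8 src > flake8-head.txt` on the pull request head.
"""
import sys


def load_violations(path):
    """Return a set of (file, code, message) tuples, ignoring line numbers.

    Dropping the line number keeps the comparison tolerant of unrelated
    changes that merely shift existing violations around.
    """
    violations = set()
    with open(path, encoding="utf-8") as report:
        for line in report:
            # Default flake8 output format: path:line:col: CODE message
            parts = line.strip().split(":", 3)
            if len(parts) != 4:
                continue
            filename, _lineno, _col, rest = parts
            code, _, message = rest.strip().partition(" ")
            violations.add((filename, code, message))
    return violations


def main(base_report, head_report):
    new = load_violations(head_report) - load_violations(base_report)
    for filename, code, message in sorted(new):
        print(f"{filename}: {code} {message}")
    return 1 if new else 0


if __name__ == "__main__":
    sys.exit(main(sys.argv[1], sys.argv[2]))
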
name: "ci"
on:
push:
branches: [master]
branches: [master, sssd-2-7]
pull_request:
branches: [master]
branches: [master, sssd-2-7]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
@@ -29,14 +29,14 @@ jobs:
working-directory: x86_64
run: |
source ../contrib/fedora/bashrc_sssd
make CFLAGS+="$SSS_WARNINGS -Werror"
make CFLAGS+="$SSS_WARNINGS -Werror -Wno-error=deprecated-declarations"
- name: make check
shell: bash
working-directory: x86_64
run: |
source ../contrib/fedora/bashrc_sssd
make CFLAGS+="$SSS_WARNINGS -Werror" check
make CFLAGS+="$SSS_WARNINGS -Werror -Wno-error=deprecated-declarations" check
- name: make distcheck
shell: bash
@@ -211,17 +211,19 @@ jobs:
pip3 install -r ./sssd/src/tests/multihost/requirements.txt
- name: Create multihost configuration
run: |
cat <<EOF > mhc.yml
root_password: 'Secret123'
domains:
- name: tier0.tests
type: sssd
hosts:
- name: client
external_hostname: client.test
role: master
EOF
uses: DamianReeves/write-file-action@41569a7dac64c252caacca7bceefe28b70b38db1
with:
path: mhc.yml
write-mode: overwrite
contents: |
root_password: 'Secret123'
domains:
- name: tier0.tests
type: sssd
hosts:
- name: client
external_hostname: client.test
role: master
- name: Run basic multihost tests
run: |
......
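The mhc.yml generated above (now produced by the write-file-action instead of a heredoc) describes the multihost topology for the tier0 run: a single sssd domain containing one client host. A minimal sketch for inspecting such a file locally, assuming PyYAML is available; the field names follow the configuration shown above:

import yaml  # PyYAML, assumed to be installed

# Load the multihost configuration produced by the workflow step above.
with open("mhc.yml", encoding="utf-8") as fh:
    config = yaml.safe_load(fh)

for domain in config["domains"]:
    print(f"domain {domain['name']} (type={domain['type']})")
    for host in domain["hosts"]:
        print(f"  {host['name']}: {host['external_hostname']} "
              f"role={host['role']}")
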
@@ -21,6 +21,7 @@
name: copr
on:
pull_request_target:
branches: [master, sssd-2-7]
types: [opened, synchronize, reopened]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
@@ -68,7 +69,7 @@ jobs:
uses: next-actions/copr/filter-chroots@master
with:
coprcfg: ${{ secrets.COPR_SECRETS }}
filter: "fedora-.+-x86_64|centos-stream-9-x86_64"
filter: "fedora-.+-x86_64|centos-stream-*-x86_64"
exclude: "fedora-eln-.+"
- name: Create copr project
@@ -93,6 +94,13 @@ jobs:
env:
coprcfg: ${{ steps.copr.outputs.coprcfg }}
run: |
# CentOS Stream 8
copr-cli --config "$coprcfg" edit-chroot \
--repos 'https://koji.mbox.centos.org/kojifiles/repos/dist-c8-stream-build/latest/$basearch/' \
--modules idm:DL1
$COPR_ACCOUNT/$COPR_PROJECT/centos-stream-8-x86_64
# CentOS Stream 9
copr-cli --config "$coprcfg" edit-chroot \
--repos 'https://kojihub.stream.centos.org/kojifiles/repos/c9s-build/latest/$basearch/' \
$COPR_ACCOUNT/$COPR_PROJECT/centos-stream-9-x86_64
......
name: copr cleanup
on:
pull_request_target:
branches: [master, sssd-2-7]
types: [closed]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
......
name: "Static code analysis"
on:
push:
branches: [master]
branches: [master, sssd-2-7]
pull_request:
branches: [master]
branches: [master, sssd-2-7]
schedule:
# Everyday at midnight
- cron: '0 0 * * *'
@@ -24,7 +24,7 @@ jobs:
uses: ./.github/actions/install-dependencies
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
uses: github/codeql-action/init@v2
with:
languages: cpp, python
queries: +security-and-quality
@@ -39,17 +39,4 @@ jobs:
make -j$PROCESSORS
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
flake8:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name:
uses: ikerexxe/lintly-flake8-github-action@fix_execution
if: github.event_name == 'pull_request'
with:
token: ${{ secrets.GITHUB_TOKEN }}
failIf: new
uses: github/codeql-action/analyze@v2
import hudson.AbortException
import org.jenkinsci.plugins.workflow.steps.FlowInterruptedException
/* Send notifications to Github.
* If it is an on-demand run then no notifications are sent.
*/
class Notification {
def pipeline
String context
String details_url
String aws_url
boolean on_demand
/* @param pipeline Jenkins pipeline context.
* @param context Github notification context (the bold text).
* @param details_url Link for "details" button.
* @param aws_url Link to cloud where logs are stored.
* @param on_demand True if this is an on-demand run.
*
* There are two types of notifications:
* a) Summary (i.e. sssd-ci: Success. details: @details_url)
* b) Single build (i.e. sssd-ci/fedora35: Success. details: @aws_url)
*/
Notification(pipeline, context, details_url, aws_url, on_demand) {
this.pipeline = pipeline
this.context = context
this.details_url = details_url
this.aws_url = aws_url
this.on_demand = on_demand
}
/* Send notification. If system is not null single build is notified. */
def notify(status, message, system = null) {
def context = system ? "${this.context}/${system}" : this.context
this.pipeline.echo "[${context}] ${status}: ${message}"
if (this.on_demand) {
return
}
this.send(status, message, context, this.getTargetURL(system))
}
private def send(status, message, context, url) {
this.pipeline.githubNotify status: status,
context: context,
description: message,
targetUrl: url
}
private def getTargetURL(system) {
if (system) {
return String.format(
'%s/%s/%s/%s/index.html',
this.aws_url,
this.pipeline.env.BRANCH_NAME,
this.pipeline.env.BUILD_ID,
system
)
}
return this.details_url
}
}
/* Manage test run. */
class Test {
def pipeline
String system
Notification notification
String artifactsdir
String basedir
String codedir
String target
/* @param pipeline Jenkins pipeline context.
* @param system System to test on.
* @param notification Notification object.
*/
Test(pipeline, system, notification) {
this.pipeline = pipeline
this.system = system
this.notification = notification
this.basedir = "/home/fedora"
this.target = pipeline.env.CHANGE_TARGET
}
def handleCmdError(rc) {
if (rc == 255) {
this.pipeline.error "Timeout reached."
} else if (rc != 0) {
this.pipeline.error "Some tests failed."
}
}
/* Test entry point. */
def run(command=null) {
/* These needs to be set here in order to get correct workspace. */
this.artifactsdir = "${this.pipeline.env.WORKSPACE}/artifacts/${this.system}"
this.codedir = "${this.pipeline.env.WORKSPACE}/sssd"
/* Clean-up previous artifacts just to be sure there are no leftovers. */
this.pipeline.sh "rm -fr ${this.artifactsdir} || :"
try {
this.pipeline.echo "Running on ${this.pipeline.env.NODE_NAME}"
this.notify('PENDING', 'Build is in progress.')
this.checkout()
try {
this.rebase()
} catch (e) {
this.pipeline.error "Unable to rebase on ${this.target}."
}
this.pipeline.echo "Executing tests, started at ${this.getCurrentTime()}"
if (command == null) {
command = String.format(
'%s/sssd-test-suite -c "%s" run --sssd "%s" --artifacts "%s" --update --prune',
"${this.basedir}/sssd-test-suite",
"${this.basedir}/configs/${this.system}.json",
this.codedir,
this.artifactsdir
)
}
def rc = this.pipeline.sh script: command, returnStatus: true
this.handleCmdError(rc)
this.pipeline.echo "Finished at ${this.getCurrentTime()}"
this.notify('SUCCESS', 'Success.')
} catch (FlowInterruptedException e) {
this.notify('ERROR', 'Aborted.')
throw e
} catch (AbortException e) {
this.notify('ERROR', e.getMessage())
throw e
} catch (e) {
this.notify('ERROR', 'Build failed.')
throw e
} finally {
this.archive()
}
}
def getCurrentTime() {
def date = new Date()
return date.format('dd. MM. yyyy HH:mm:ss')
}
def checkout() {
this.pipeline.dir('sssd') {
this.pipeline.checkout this.pipeline.scm
}
}
def rebase() {
/* Do not rebase if there is no target (not a pull request). */
if (!this.target) {
return
}
this.pipeline.echo "Rebasing on ${this.target}"
// Fetch refs
this.git(String.format(
"fetch --no-tags --progress origin +refs/heads/%s:refs/remotes/origin/%s",
this.target, this.target
))
// Remove left overs from previous rebase if there are any
this.git("rebase --abort || :")
// Just to be sure
this.pipeline.sh "rm -fr '${this.codedir}/.git/rebase-apply' || :"
// Rebase
this.git("rebase origin/${this.target}")
}
def git(command) {
this.pipeline.sh "git -C '${this.codedir}' ${command}"
}
def archive() {
this.pipeline.archiveArtifacts artifacts: "artifacts/**",
allowEmptyArchive: true
this.pipeline.sh String.format(
'%s/sssd-ci archive --name "%s" --system "%s" --artifacts "%s"',
"${this.basedir}/sssd-ci",
"${pipeline.env.BRANCH_NAME}/${pipeline.env.BUILD_ID}",
this.system,
"${artifactsdir}"
)
this.pipeline.sh "rm -fr ${this.artifactsdir}"
}
def notify(status, message) {
this.notification.notify(status, message, this.system)
}
}
/* Manage test run for on demand test. */
class OnDemandTest extends Test {
String repo
String branch
/* @param pipeline Jenkins pipeline context.
* @param system System to test on.
* @param notification Notification object.
* @param repo Repository fetch URL.
* @param branch Branch to checkout.
*/
OnDemandTest(pipeline, system, notification, repo, branch) {
super(pipeline, system, notification)
this.repo = repo
this.branch = branch
}
def handleCmdError(rc) {
super.handleCmdError(rc)
}
def run() {
this.pipeline.echo "Repository: ${this.repo}"
this.pipeline.echo "Branch: ${this.branch}"
super.run()
}
def checkout() {
this.pipeline.dir('sssd') {
this.pipeline.git branch: this.branch, url: this.repo
}
}
def rebase() {
/* Do nothing. */
}
def archive() {
this.pipeline.echo 'On demand run. Artifacts are not stored in the cloud.'
this.pipeline.echo 'They are accessible only from Jenkins.'
this.pipeline.echo "${this.pipeline.env.BUILD_URL}/artifact/artifacts/${this.system}"
this.pipeline.archiveArtifacts artifacts: "artifacts/**",
allowEmptyArchive: true
this.pipeline.sh "rm -fr ${this.artifactsdir}"
}
}
/* Manage test run for internal covscan test.
* Can be triggered for PRs, ondemand and branch runs */
class Covscan extends Test {
String repo
String branch
String basedir
String pr_number
boolean on_demand
String artifactsdir
/* @param pipeline Jenkins pipeline context.
* @param notification Notification object.
* @param repo Repository fetch URL.
* @param branch Branch to checkout.
* @param pr_number Pull Request Number, null if not inside a PR.
* @param on_demand true for on_demand runs, false otherwise.
*/
Covscan(pipeline, notification, repo, branch, pr_number, on_demand) {
super(pipeline, "covscan", notification)
this.repo = repo
this.branch = branch
this.pr_number = pr_number
this.basedir = "/home/fedora"
this.on_demand = on_demand
}
/* Errors returned from covscan.sh */
def handleCmdError(rc) {
if (rc == 0) { return }
switch (rc) {
case 1:
this.pipeline.error "Covscan diff shows new errors!"
break
case 2:
this.pipeline.error "Covscan task FAILED"
break
case 3:
this.pipeline.error "Covscan task INTERRUPTED"
break
case 4:
this.pipeline.error "Covscan task CANCELLED"
break
case 255:
this.pipeline.error "Timeout reached."
break
default:
this.pipeline.error "Generic Failure, unknown return code"
break
}
}
def run() {
def version = this.pr_number ? this.pr_number : this.branch.trim()
this.pipeline.echo "Executing covscan script with version: ${version}_${this.pipeline.env.BUILD_ID}"
def command = String.format(
'%s/scripts/covscan.sh "%s%s_%s" "%s"',
this.basedir,
this.pr_number ? "pr" : "",
version,
this.pipeline.env.BUILD_ID,
this.pipeline.env.WORKSPACE,
)
super.run(command)
}
def checkout() {
if (on_demand) {
this.pipeline.echo "Checkout ${this.branch}"
this.pipeline.dir('sssd') {
this.pipeline.git branch: this.branch, url: this.repo
}
} else {
this.pipeline.dir('sssd') {
this.pipeline.checkout this.pipeline.scm
}
}
}
def rebase() {
super.rebase()
}
def archive() {
if (on_demand) {
this.pipeline.echo 'On demand run. Artifacts are not stored in the cloud.'
this.pipeline.echo 'They are accessible only from Jenkins.'
this.pipeline.echo "${this.pipeline.env.BUILD_URL}/artifact/artifacts/${this.system}"
this.pipeline.archiveArtifacts artifacts: "artifacts/**",
allowEmptyArchive: true
this.pipeline.sh "rm -fr ${this.artifactsdir}"
} else {
super.archive()
}
}
def notify(status, message) {
this.notification.notify(status, message, "covscan")
}
}
def systems = []
def pr_labels = []
def with_tests_label = false
def with_tests_title = false
def on_demand = params.ON_DEMAND ? true : false
def notification = new Notification(
this, 'sssd-ci',
'https://github.com/SSSD/sssd/blob/master/contrib/test-suite/README.md',
'https://s3.eu-central-1.amazonaws.com/sssd-ci',
on_demand
)
this.properties([
buildDiscarder(logRotator(daysToKeepStr: '30', numToKeepStr: '70')),
])
try {
stage('Get system list') {
node('master') {
if (params.SYSTEMS && params.SYSTEMS != 'all') {
/* This is a parametrized custom build. System list is taken
* from provided parameter. */
systems = params.SYSTEMS.split()
} else {
/* This is automated build or custom build that requested
* tests on all systems (i.e. same systems as master branch) */
def branch = env.CHANGE_TARGET ? env.CHANGE_TARGET : 'master'
def config = "systems-${branch}"
echo "Using configuration: ${config}"
/* Configuration is read from Jenkins-managed configuration file.
* Path to the configuration is loaded into env.CONFIG_PATH */
configFileProvider([
configFile(fileId: config, variable: 'CONFIG_PATH')
]) {
def contents = readFile "${env.CONFIG_PATH}"
systems = contents.split()
}
}
echo 'Test will be done on following systems:'
echo systems.join(', ')
}
}
/* Setup nice build description so pull request are easy to find. */
stage('Setup description') {
node('master') {
if (on_demand) {
/* user: branch */
def build = currentBuild.rawBuild
def cause = build.getCause(hudson.model.Cause.UserIdCause.class)
def user = cause.getUserId()
currentBuild.description = "${user}: ${params.REPO_BRANCH}"
} else {
if (env.CHANGE_TARGET) {
/* PR XXX: pull request name */
def title = sh returnStdout: true, script: """
curl -s https://api.github.com/repos/SSSD/sssd/pulls/${env.CHANGE_ID} | \
python -c "import sys, json; print(json.load(sys.stdin).get('title'))"
"""
currentBuild.description = "PR ${env.CHANGE_ID}: ${title}"
if (title.toLowerCase().contains('tests: ')) {
with_tests_title = true
}
} else {
/* Branch: name */
currentBuild.description = "Branch: ${env.BRANCH_NAME}"
}
}
}
}
stage('Retrieve labels') {
node('master') {
if (env.CHANGE_TARGET) {
def labels = sh returnStdout: true, script: """
curl -s https://api.github.com/repos/SSSD/sssd/pulls/${env.CHANGE_ID}
"""
def props = readJSON text: labels
props['labels'].each { key, value ->
pr_labels.add(key['name'])
if (key['name'] == 'Tests') {
with_tests_label = true
}
}
}
}
}
stage('Prepare systems') {
notification.notify('PENDING', 'Pending.')
/* Notify that all systems are pending. */
for (system in systems) {
notification.notify('PENDING', 'Awaiting executor', system)
}
if ((with_tests_label == false) && (with_tests_title == false)) {
notification.notify('PENDING', 'Pending.', "covscan")
}
}
/* Run tests on multiple systems in parallel. */
stage('Run Tests') {
def stages = [:]
for (system in systems) {
def test = null
if (!on_demand) {
test = new Test(this, system, notification)
} else {
test = new OnDemandTest(
this, system, notification,
params.REPO_URL, params.REPO_BRANCH
)
}
stages.put("${system}", {
node("sssd-ci") {
stage("${system}") {
test.run()
}
}
})
}
/* Run covscan against non-test related PRs */
if ((with_tests_label == false) && (with_tests_title == false)) {
stages.put("covscan", {
node("sssd-ci") {
stage("covscan") {
covscan = new Covscan(this, notification, params.REPO_URL, params.REPO_BRANCH, env.CHANGE_ID, on_demand)
covscan.run()
}
}
})
}
parallel(stages)
}
stage('Report results') {
notification.notify('SUCCESS', 'All tests succeeded.')
}
} catch (FlowInterruptedException e) {
stage('Report results') {
notification.notify('ERROR', 'Aborted.')
throw e
}
} catch (e) {
stage('Report results') {
notification.notify('ERROR', 'Some tests failed.')
throw e
}
}
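The pipeline's 'Setup description' and 'Retrieve labels' stages shell out to curl and parse the GitHub API response for the pull request's title and labels. The same lookup as a standalone Python sketch, using only the standard library and the endpoint shown in the pipeline (unauthenticated, so subject to GitHub's rate limits; the PR number below is a placeholder):

import json
import urllib.request


def pull_request_info(pr_number, repo="SSSD/sssd"):
    """Fetch title and label names for a pull request via the GitHub API."""
    url = f"https://api.github.com/repos/{repo}/pulls/{pr_number}"
    with urllib.request.urlopen(url) as response:
        data = json.load(response)
    labels = [label["name"] for label in data.get("labels", [])]
    return data.get("title"), labels


if __name__ == "__main__":
    title, labels = pull_request_info(1234)  # placeholder PR number
    print(f"PR title: {title}")
    print(f"Labels: {', '.join(labels) or '(none)'}")
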
@@ -1621,6 +1621,7 @@ sssd_pac_SOURCES = \
src/responder/pac/pacsrv.c \
src/responder/pac/pacsrv_cmd.c \
src/providers/ad/ad_pac_common.c \
src/util/pac_utils.c \
$(SSSD_RESPONDER_OBJ)
sssd_pac_CFLAGS = \
$(AM_CFLAGS) \
@@ -4236,6 +4237,7 @@ libsss_krb5_common_la_SOURCES = \
src/util/sss_krb5.c \
src/util/sss_iobuf.c \
src/util/become_user.c \
src/util/pac_utils.c \
$(NULL)
libsss_krb5_common_la_CFLAGS = \
$(AM_CFLAGS) \
@@ -5375,9 +5377,12 @@ if BUILD_PYTHON2_BINDINGS
rm -f $(builddir)/src/config/SSSDConfig/sssdoptions.py ; \
fi
rm -f $(builddir)/src/config/SSSDConfig/*.pyc
cd $(builddir)/src/config; $(PYTHON2) setup.py build --build-base $(abs_builddir)/src/config clean --all
rm -fr "$(builddir)/src/config/dist"
rm -fr "$(builddir)/src/config/SSSDConfig.egg-info"
rm -fr "$(builddir)/src/config/SSSDConfig/__pycache__"
find "$(builddir)/src/config/SSSDConfig" -name "*.py[co]" -delete
endif
if BUILD_PYTHON3_BINDINGS
if [ ! $(srcdir)/src/config/SSSDConfig/ipachangeconf.py -ef $(builddir)/src/config/SSSDConfig/ipachangeconf.py ]; then \
@@ -5388,9 +5393,12 @@ if BUILD_PYTHON3_BINDINGS
rm -f $(builddir)/src/config/SSSDConfig/sssdoptions.py ; \
fi
rm -f $(builddir)/src/config/SSSDConfig/__pycache__/*.pyc
cd $(builddir)/src/config; $(PYTHON3) setup.py build --build-base $(abs_builddir)/src/config clean --all
rm -fr "$(builddir)/src/config/dist"
rm -fr "$(builddir)/src/config/SSSDConfig.egg-info"
rm -fr "$(builddir)/src/config/SSSDConfig/__pycache__"
find "$(builddir)/src/config/SSSDConfig" -name "*.py[co]" -delete
endif
for doc in $(SSSD_DOCS); do \
rm -Rf $$doc; \
......
@@ -311,7 +311,6 @@ License: GPLv3+
Requires: samba-client-libs >= %{samba_package_version}
Requires: sssd-common = %{version}-%{release}
Requires: sssd-krb5-common = %{version}-%{release}
Requires: sssd-idp = %{version}-%{release}
Requires: libipa_hbac%{?_isa} = %{version}-%{release}
Requires: libsss_certmap = %{version}-%{release}
Recommends: bind-utils
@@ -504,13 +503,14 @@ An implementation of a Kerberos KCM server. Use this package if you want to
use the KCM: Kerberos credentials cache.
%package idp
Summary: Kerberos plugins for external identity providers.
Summary: Kerberos plugins and OIDC helper for external identity providers.
License: GPLv3+
Requires: sssd-common = %{version}-%{release}
%description idp
This package provides Kerberos plugins that are required to enable
authentication against external identity providers.
authentication against external identity providers. Additionally a helper
program to handle the OAuth 2.0 Device Authorization Grant is provided.
%prep
%autosetup -p1
@@ -598,7 +598,7 @@ rm -Rf ${RPM_BUILD_ROOT}/%{_docdir}/%{name}
# Older versions of rpmbuild can only handle one -f option
# So we need to append to the sssd*.lang file
for file in `ls $RPM_BUILD_ROOT/%{python3_sitelib}/*.egg-info 2> /dev/null`
for file in `find $RPM_BUILD_ROOT/%{python3_sitelib} -maxdepth 1 -name "*.egg-info" 2> /dev/null`
do
echo %{python3_sitelib}/`basename $file` >> python3_sssdconfig.lang
done
@@ -877,6 +877,7 @@ done
%{_mandir}/man8/pam_sss.8*
%{_mandir}/man8/pam_sss_gss.8*
%{_mandir}/man8/sssd_krb5_locator_plugin.8*
%{_mandir}/man8/sssd_krb5_localauth_plugin.8*
%files -n libsss_sudo
%license src/sss_client/COPYING
......
sssd (2.7.0-1) UNRELEASED; urgency=medium
sssd (2.7.1-1) UNRELEASED; urgency=medium
* New upstream release.
* control: Drop sssd-ipd from sssd-ipa depends.
* sssd-common.install: Add a new manpage.
-- Timo Aaltonen <tjaalton@debian.org> Thu, 02 Jun 2022 19:36:04 +0300
sssd (2.7.0-1) unstable; urgency=medium
* New upstream release.
* Update signing-key.asc.
@@ -7,7 +15,7 @@ sssd (2.7.0-1) UNRELEASED; urgency=medium
external identity providers.
* control, rules: Enable krb5 config snippets by default.
-- Timo Aaltonen <tjaalton@debian.org> Wed, 25 May 2022 09:48:50 +0300
-- Timo Aaltonen <tjaalton@debian.org> Wed, 25 May 2022 12:59:05 +0300
sssd (2.6.3-3) unstable; urgency=medium
......
@@ -168,7 +168,6 @@ Depends:
libsss-idmap0 (= ${binary:Version}),
sssd-ad-common (= ${binary:Version}),
sssd-common (= ${binary:Version}),
sssd-idp (= ${binary:Version}),
sssd-krb5-common (= ${binary:Version}),
${misc:Depends},
${shlibs:Depends}
......
@@ -60,6 +60,7 @@ usr/share/man/man5/sssd-systemtap.5*
usr/share/man/man5/sssd.conf.5*
usr/share/man/man8/idmap_sss.8*
usr/share/man/man8/sssd.8*
usr/share/man/man8/sssd_krb5_localauth_plugin.8
usr/share/man/man8/sssd_krb5_locator_plugin.8*
usr/share/polkit-1/rules.d/sssd-pcsc.rules
usr/share/sssd/cfg_rules.ini
......
The remaining nine diffs are collapsed and not shown here.