Update

parent 12b76e0c7a
commit 27c4ec74a1
8  .editorconfig  Normal file
@@ -0,0 +1,8 @@
root = true

[*]
charset = utf-8
end_of_line = lf
indent_style = space
trim_trailing_whitespace = true
insert_final_newline = true
51  .gitignore  vendored  Normal file
@@ -0,0 +1,51 @@
cscope.in.out
cscope.out
cscope.po.out
__pycache__
prefix/
.gdbinit

# Symlinks
/gstreamer
/gst-plugins-base
/gst-plugins-good
/libnice
/gst-plugins-bad
/gst-plugins-ugly
/gst-libav
/gst-rtsp-server
/gst-devtools
/gst-integration-testsuites
/gst-editing-services
/gstreamer-vaapi
/gst-omx
/gstreamer-sharp
/pygobject
/gst-python
/gst-examples
/gst-plugins-rs

# Because of unwanted diff caused by redirect wrap files
# https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/1140
subprojects/*.wrap

subprojects/*/
!subprojects/gst-devtools
!subprojects/gst-docs
!subprojects/gst-editing-services
!subprojects/gst-examples
!subprojects/gst-integration-testsuites
!subprojects/gst-libav
!subprojects/gst-omx
!subprojects/gst-plugins-bad
!subprojects/gst-plugins-base
!subprojects/gst-plugins-good
!subprojects/gst-plugins-ugly
!subprojects/gst-python
!subprojects/gstreamer
!subprojects/gstreamer-sharp
!subprojects/gstreamer-vaapi
!subprojects/gst-rtsp-server
!subprojects/macos-bison-binary
!subprojects/win-flex-bison-binaries
!subprojects/win-nasm
788  .gitlab-ci.yml  Normal file
@@ -0,0 +1,788 @@
include:
  - remote: "https://gitlab.freedesktop.org/freedesktop/ci-templates/-/raw/14731f78c23c7b523a85a26a068ade9ac1ecd2f3/templates/fedora.yml"
  - remote: "https://gitlab.freedesktop.org/freedesktop/ci-templates/-/raw/14731f78c23c7b523a85a26a068ade9ac1ecd2f3/templates/debian.yml"

stages:
  - 'trigger'
  - 'build docker'
  - 'preparation'
  - 'pre-build'
  - 'build'
  - 'test'
  # Use the resulting binaries
  - 'integrate'

variables:
  GIT_DEPTH: 1

  # Branch to track for modules that have no ref specified in the manifest
  GST_UPSTREAM_BRANCH: 'main'
  ORC_UPSTREAM_BRANCH: 'master'

  ###
  # IMPORTANT
  # These are the version tags for the docker images the CI runs against.
  # If you are hacking on them or need them to be rebuilt, it's enough
  # to change any part of the string of the image you want.
  ###
  FEDORA_TAG: '2021-12-03.1'
  INDENT_TAG: '2021-10-04.0'
  WINDOWS_TAG: "2022-01-26.3"

  GST_UPSTREAM_REPO: 'gstreamer/gstreamer'
  FDO_UPSTREAM_REPO: 'gstreamer/gstreamer'

  FEDORA_AMD64_SUFFIX: 'amd64/fedora'
  INDENT_AMD64_SUFFIX: 'amd64/gst-indent'
  WINDOWS_AMD64_SUFFIX: 'amd64/windows'
  WINDOWS_RUST_AMD64_SUFFIX: 'amd64/windows-rust'

  FEDORA_DOCS_IMAGE: "registry.freedesktop.org/gstreamer/gst-ci/amd64/fedora:2020-07-03.0-master"
  WINDOWS_IMAGE: "$CI_REGISTRY_IMAGE/$WINDOWS_AMD64_SUFFIX:$WINDOWS_TAG-$GST_UPSTREAM_BRANCH"
  WINDOWS_UPSTREAM_IMAGE: "$CI_REGISTRY/$GST_UPSTREAM_REPO/$WINDOWS_AMD64_SUFFIX:$WINDOWS_TAG-$GST_UPSTREAM_BRANCH"

  RUST_MINIMUM_VERSION: '1.54.0'
  RUST_LATEST_VERSION: '1.55.0'

  WINDOWS_RUST_MINIMUM_IMAGE: "$CI_REGISTRY_IMAGE/$WINDOWS_RUST_AMD64_SUFFIX:$WINDOWS_TAG-$GST_UPSTREAM_BRANCH-rust-$RUST_MINIMUM_VERSION"
  WINDOWS_RUST_MINIMUM_UPSTREAM_IMAGE: "$CI_REGISTRY/$GST_UPSTREAM_REPO/$WINDOWS_RUST_AMD64_SUFFIX:$WINDOWS_TAG-$GST_UPSTREAM_BRANCH-rust-$RUST_MINIMUM_VERSION"

  WINDOWS_RUST_LATEST_IMAGE: "$CI_REGISTRY_IMAGE/$WINDOWS_RUST_AMD64_SUFFIX:$WINDOWS_TAG-$GST_UPSTREAM_BRANCH-rust-$RUST_LATEST_VERSION"
  WINDOWS_RUST_LATEST_UPSTREAM_IMAGE: "$CI_REGISTRY/$GST_UPSTREAM_REPO/$WINDOWS_RUST_AMD64_SUFFIX:$WINDOWS_TAG-$GST_UPSTREAM_BRANCH-rust-$RUST_LATEST_VERSION"

  MESON_BUILDTYPE_ARGS: --default-library=both
  DEFAULT_MESON_ARGS: >
    -Dlibnice:tests=disabled
    -Dlibnice:examples=disabled
    -Dopenh264:tests=disabled
    -Dpygobject:tests=false
    -Dpython=enabled
    -Dlibav=enabled
    -Dugly=enabled
    -Dbad=enabled
    -Ddevtools=enabled
    -Dges=enabled
    -Drtsp_server=enabled
    -Dvaapi=enabled
    -Dsharp=disabled
    -Dgpl=enabled

  MESON_GST_WERROR: >
    -Dgstreamer:werror=true
    -Dgst-plugins-base:werror=true
    -Dgst-plugins-good:werror=true
    -Dgst-plugins-ugly:werror=true
    -Dgst-plugins-bad:werror=true
    -Dgst-rtsp-server:werror=true
    -Dgst-libav:werror=true
    -Dgst-examples:werror=true
    -Dgst-editing-services:werror=true
    -Dgst-docs:werror=true
    -Dgst-omx:werror=true
    -Dgst-devtools:werror=true
    -Dgst-python:werror=true
    -Dgstreamer-vaapi:werror=true
    -Dgstreamer-sharp:werror=true

workflow:
  # https://docs.gitlab.com/ee/ci/yaml/index.html#switch-between-branch-pipelines-and-merge-request-pipelines
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      variables:
        GIT_FETCH_EXTRA_FLAGS: '--no-tags'
    - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push"
      when: never
    - if: '$CI_COMMIT_TAG'
    - if: '$CI_COMMIT_BRANCH'
      variables:
        GIT_FETCH_EXTRA_FLAGS: '--no-tags'

#
# Global CI policy
#
# This can be used to configure the global behaviour of our jobs.
#
default:
  retry:
    max: 2
    when:
      - 'runner_system_failure'
      - 'stuck_or_timeout_failure'
      - 'scheduler_failure'
      - 'api_failure'
  interruptible: true

# This is an empty job that is used to trigger the pipeline.
trigger:
  image: alpine:latest
  stage: 'trigger'
  variables:
    GIT_STRATEGY: none
  script:
    - echo "Trigger job done, now running the pipeline."
  rules:
    # If the MR is assigned to the Merge bot, trigger the pipeline automatically
    - if: '$CI_MERGE_REQUEST_ASSIGNEES == "gstreamer-merge-bot"'
    # Always run tests post-merge
    - if: '$CI_PROJECT_NAMESPACE == "gstreamer" && $CI_COMMIT_BRANCH == $GST_UPSTREAM_BRANCH'
    # When the assignee isn't the merge bot, require an explicit action to trigger the pipeline
    # to avoid wasting CI resources
    - if: '$CI_MERGE_REQUEST_ASSIGNEES != "gstreamer-merge-bot"'
      when: 'manual'
      allow_failure: false

.fedora image:
  variables:
    FDO_DISTRIBUTION_VERSION: '31'
    FDO_REPO_SUFFIX: "$FEDORA_AMD64_SUFFIX"
    FDO_DISTRIBUTION_TAG: "$FEDORA_TAG-$GST_UPSTREAM_BRANCH"
    FDO_DISTRIBUTION_EXEC: 'GIT_BRANCH=$CI_COMMIT_REF_NAME GIT_URL=$CI_REPOSITORY_URL bash ci/docker/fedora/prepare.sh'

fedora amd64 docker:
  extends:
    - '.fedora image'
    - '.fdo.container-build@fedora'
  stage: 'build docker'
  needs:
    - "trigger"
  tags:
    - 'packet.net'

.gst-indent image:
  variables:
    FDO_DISTRIBUTION_VERSION: 'stretch'
    FDO_REPO_SUFFIX: "$INDENT_AMD64_SUFFIX"
    FDO_DISTRIBUTION_TAG: "$INDENT_TAG-$GST_UPSTREAM_BRANCH"
    FDO_DISTRIBUTION_PACKAGES: 'curl indent git findutils'

gst-indent amd64 docker:
  extends:
    - '.gst-indent image'
    - '.fdo.container-build@debian'
  stage: 'build docker'
  # Do not depend on the trigger, as we want to run indent always
  needs: []

windows amd64 docker:
  stage: "build docker"
  needs:
    - "trigger"
  variables:
    # Unlike the buildah/linux jobs, this file
    # needs to be relative to docker/windows/ subdir
    # as it makes life easier in the powershell script
    #
    # We also don't need a CONTEXT_DIR var as it's also
    # hardcoded to be docker/windows/
    DOCKERFILE: "ci/docker/windows/Dockerfile"
  tags:
    - windows
    - shell
    - "1809"
  script:
    # We need to pass an array and to resolve the env vars, so we can't use a variable:
    - $DOCKER_BUILD_ARGS = @("--build-arg", "DEFAULT_BRANCH=$GST_UPSTREAM_BRANCH")

    - "& ci/docker/windows/container.ps1 $CI_REGISTRY $CI_REGISTRY_USER $CI_REGISTRY_PASSWORD $WINDOWS_IMAGE $WINDOWS_UPSTREAM_IMAGE $DOCKERFILE"
    - |
      if (!($?)) {
        echo "Failed to build the image"
        Exit 1
      }

.windows rust docker build:
  stage: 'build docker'
  needs:
    - job: 'windows amd64 docker'
      artifacts: false
  rules:
    - if: '$CI_PROJECT_NAME == "gst-ci"'
  variables:
    # Unlike the buildah/linux jobs, this file
    # needs to be relative to docker/windows/ subdir
    # as it makes life easier in the powershell script
    #
    # We also don't need a CONTEXT_DIR var as it's also
    # hardcoded to be docker/windows/
    DOCKERFILE: 'docker/windows/rust.Dockerfile'
  tags:
    - 'windows'
    - 'shell'
    - '1809'
  script:
    # We need to pass an array and to resolve the env vars, so we can't use a variable:
    - $DOCKER_BUILD_ARGS = @("--build-arg", "DEFAULT_BRANCH=$GST_UPSTREAM_BRANCH", "--build-arg", "BASE_IMAGE=$WINDOWS_IMAGE", "--build-arg", "RUST_VERSION=$RUST_VERSION")

    - $env:WINDOWS_CONTAINER_SCRIPT_PATH = "$env:CI_PROJECT_DIR\container.ps1"
    - echo "Fetching $env:WINDOWS_CONTAINER_SCRIPT_URL"
    - Invoke-WebRequest -Uri $env:WINDOWS_CONTAINER_SCRIPT_URL -OutFile $env:WINDOWS_CONTAINER_SCRIPT_PATH

    - "& $env:WINDOWS_CONTAINER_SCRIPT_PATH $CI_REGISTRY $CI_REGISTRY_USER $CI_REGISTRY_PASSWORD $RUST_IMAGE $RUST_UPSTREAM_IMAGE $DOCKERFILE"
    - |
      if (!($?)) {
        echo "Failed to build the image"
        Exit 1
      }

windows rust amd64 docker latest stable:
  extends: '.windows rust docker build'
  variables:
    RUST_IMAGE: !reference [variables, "WINDOWS_RUST_LATEST_IMAGE"]
    RUST_UPSTREAM_IMAGE: !reference [variables, "WINDOWS_RUST_LATEST_UPSTREAM_IMAGE"]
    RUST_VERSION: !reference [variables, "RUST_LATEST_VERSION"]

windows rust amd64 docker minimum supported version:
  extends: '.windows rust docker build'
  variables:
    RUST_IMAGE: !reference [variables, "WINDOWS_RUST_MINIMUM_IMAGE"]
    RUST_UPSTREAM_IMAGE: !reference [variables, "WINDOWS_RUST_MINIMUM_UPSTREAM_IMAGE"]
    RUST_VERSION: !reference [variables, "RUST_MINIMUM_VERSION"]



# ---- Preparation ----- #
#
# gst-indent!!
#
gst indent:
  extends:
    - '.gst-indent image'
    - '.fdo.suffixed-image@debian'
  stage: 'preparation'
  needs:
    - job: 'gst-indent amd64 docker'
      artifacts: false
  script:
    # man indent. grep RETURN VALUE, grab a beer on my behalf...
    - indent --version || true
    - ./scripts/gst-indent-all
    - |
      if git diff --quiet -- ':!subprojects/gst-integration-testsuites/medias' .; then
        echo "Code is properly formatted"
      else
        git diff --color=always -- ':!subprojects/gst-integration-testsuites/medias' .
        echo 'style diverges, please run gst-indent first'
        exit 1
      fi

#
# build setup templates
#
.build_template: &build
  - ci/scripts/handle-subprojects-cache.py subprojects/
  # Update subprojects to respect `.wrap` content
  - meson subprojects update --reset
  - echo $MESON_ARGS
  - meson build/ $MESON_ARGS
  - ninja -C build/
  - ccache --show-stats

.build_ccache_vars:
  variables:
    CCACHE_COMPILERCHECK: 'content'
    CCACHE_COMPRESS: 'true'
    CCACHE_BASEDIR: '/cache/gstreamer/gstreamer'
    CCACHE_DIR: '/cache/gstreamer/gstreamer/ccache/'
    # shared across everything really
    CCACHE_MAXSIZE: '10G'
    CARGO_HOME: '/cache/gstreamer/cargo'

.base_modules_changes: &modules_changes
  - .gitlab-ci.yml
  - ci/gitlab/*.py
  - meson.build
  - subprojects/*.wrap
  - subprojects/gst-devtools/**/*
  - subprojects/gst-editing-services/**/*
  - subprojects/gst-integration-testsuites/**/*
  - subprojects/gst-libav/**/*
  - subprojects/gst-omx/**/*
  - subprojects/gst-plugins-bad/**/*
  - subprojects/gst-plugins-base/**/*
  - subprojects/gst-plugins-good/**/*
  - subprojects/gst-plugins-ugly/**/*
  - subprojects/gst-python/**/*
  - subprojects/gstreamer/**/*
  - subprojects/gstreamer-sharp/**/*
  - subprojects/gstreamer-vaapi/**/*
  - subprojects/gst-rtsp-server/**/*

.simple_fedora_build: &simple_build >-
  ${DEFAULT_MESON_ARGS}
  -Dsharp=enabled
  -Domx=enabled
  -Dgst-omx:target=generic
  -Ddoc=disabled
  -Drs=disabled
  ${MESON_BUILDTYPE_ARGS}
  ${MESON_GST_WERROR}

.build:
  stage: 'build'
  extends:
    - '.build_ccache_vars'
  needs:
    - "trigger"
  # Taking into account the slowest shared runner + time needed to upload the binaries to artifacts
  # Also need to take into account I/O of pulling docker images and uploading artifacts
  timeout: '45min'
  variables:
    MESON_ARGS: "${DEFAULT_MESON_ARGS} ${MESON_BUILDTYPE_ARGS} ${MESON_GST_WERROR}"
  script:
    *build
  after_script:
    - mv build/meson-logs/ meson-logs
  artifacts:
    expire_in: "7 days"
    when: "always"
    paths:
      - 'meson-logs/'
  rules:
    # If this matches, it means the pipeline is running against either the main
    # or a stable branch, so make it manual
    - if: '$CI_PROJECT_NAMESPACE == "gstreamer" && $CI_COMMIT_BRANCH == $GST_UPSTREAM_BRANCH'
      when: manual
    - changes:
        *modules_changes

.build fedora x86_64:
  extends:
    - '.fedora image'
    - '.fdo.suffixed-image@fedora'
    - '.build'
  needs:
    - "fedora amd64 docker"
  variables:
    MESON_ARGS: *simple_build

build nodebug fedora x86_64:
  extends:
    - '.fedora image'
    - '.fdo.suffixed-image@fedora'
    - '.build'
  needs:
    - "fedora amd64 docker"
  variables:
    MESON_ARGS: "${DEFAULT_MESON_ARGS} -Dsharp=enabled -Dgstreamer:gst_debug=false -Domx=enabled -Dgst-omx:target=generic ${MESON_BUILDTYPE_ARGS} ${MESON_GST_WERROR}"

build clang fedora x86_64:
  extends: '.build fedora x86_64'
  variables:
    CC: 'ccache clang'
    CXX: 'ccache clang++'

.build windows:
  image: $WINDOWS_IMAGE
  stage: 'build'
  tags:
    - 'docker'
    - 'windows'
    - '1809'
  needs:
    - "windows amd64 docker"
  timeout: '45min'
  variables:
    MESON_ARGS: >
      ${DEFAULT_MESON_ARGS}
      -Dpython=disabled
      -Dlibav=disabled
      -Dvaapi=disabled
      -Dgst-plugins-base:pango=enabled
      -Dgst-plugins-good:cairo=enabled
    # Needs to not be empty otherwise the newline -> space replace command in
    # `script:` will fail
    MESON_CROSS_ARGS: ' '
  rules:
    # If this matches, it means the pipeline is running against either the main
    # or a stable branch, so make it manual
    - if: '$CI_PROJECT_NAMESPACE == "gstreamer" && $CI_COMMIT_BRANCH == $GST_UPSTREAM_BRANCH'
      when: manual
    - changes:
        *modules_changes
  script:
    - ci/scripts/handle-subprojects-cache.py subprojects/
    # For some reason, options are separated by newline instead of space, so we
    # have to replace them first.
    - $env:MESON_ARGS = $env:MESON_ARGS.replace("`n"," ")
    - $env:MESON_CROSS_ARGS = $env:MESON_CROSS_ARGS.replace("`n"," ")
    - cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=$env:ARCH &&
      meson build $env:MESON_ARGS $env:MESON_CROSS_ARGS &&
      ninja -C build"
  # XXX: Re-enable when uploading stops timing out
  #artifacts:
  #  expire_in: "7 days"
  #  when: "always"
  #  paths:
  #    - 'build/meson-logs/'
  #    - 'vslogs.zip'

build vs2019 amd64:
  extends: '.build windows'
  variables:
    ARCH: 'amd64'

build vs2019 x86:
  extends: '.build windows'
  variables:
    ARCH: 'x86'

build vs2019 arm64 uwp:
  extends: '.build windows'
  variables:
    ARCH: 'arm64'
    # pango pulls in cairo which pulls in pixman which doesn't build because of
    # https://github.com/mesonbuild/meson/issues/9889
    MESON_CROSS_ARGS: >
      -Dgst-plugins-base:pango=disabled
      -Dgst-plugins-good:cairo=disabled
      -Dgst-devtools:cairo=disabled
      --cross-file ci/meson/vs2019-arm64-cross-file.txt
      --native-file ci/meson/vs2019-x64-native-file.txt

build msys2 :
  extends: '.build windows'
  timeout: '60min'
  rules:
    - changes:
        *modules_changes
      allow_failure: true
      when: 'manual'
  script:
    # Make sure powershell exits on errors
    # https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_preference_variables?view=powershell-6
    - $ErrorActionPreference = "Stop"

    # Configure MSYS2 to use the UCRT64 environment, start in the same directory
    # and inherit PATH
    - $env:MSYSTEM = "UCRT64"
    - $env:CHERE_INVOKING = "1"
    - $env:MSYS2_PATH_TYPE = "inherit"
    # For some reason, options are separated by newline instead of space, so we
    # have to replace them first.
    - $env:MESON_ARGS = $env:MESON_ARGS.replace("`n"," ")
    # Replace backslashes with forward slashes so bash doesn't complain
    - $env:_PROJECT_DIR = $env:CI_PROJECT_DIR.replace('\','/')
    - C:\msys64\usr\bin\bash -lc "meson build $env:MESON_ARGS && ninja -C build"

# ---- Tests ----- #

.test:
  stage: 'test'
  extends:
    - '.build_ccache_vars'
  needs:
    - "trigger"
  variables:
    MESON_ARGS: *simple_build

    # Disable colored output to avoid weird rendering issues
    GST_DEBUG_NO_COLOR: "true"
    CI_ARTIFACTS_URL: "${CI_PROJECT_URL}/-/jobs/${CI_JOB_ID}/artifacts/raw/validate-logs/"
    GST_VALIDATE_LAUNCHER_FORCE_COLORS: "true"
    TIMEOUT_FACTOR: "2"
    CARGO_HOME: "/cache/gstreamer/cargo"
    # Enable the fault handler so we get backtraces on segfaults.
    # any non-empty string will do
    PYTHONFAULTHANDLER: "enabled"
  rules:
    - changes:
        *modules_changes
  script:
    - *build

    - echo "-> Running ${TEST_SUITE}"
    - >
      ./gst-env.py
      gst-validate-launcher ${TEST_SUITE}
      --check-bugs
      --dump-on-failure
      --mute
      --shuffle
      --no-display
      --meson-no-rebuild
      --timeout-factor "${TIMEOUT_FACTOR}"
      --fail-on-testlist-change
      -l "${CI_PROJECT_DIR}/validate-logs/"
      --xunit-file "${CI_PROJECT_DIR}/validate-logs/xunit.xml"
      ${EXTRA_VALIDATE_ARGS}
  after_script:
    - mv build/meson-logs/ meson-logs
  artifacts:
    expire_in: '14 days'
    when: always
    paths:
      - 'meson-logs/'
      - 'validate-logs'
    reports:
      junit:
        - "validate-logs/*.xml"

.test fedora x86_64:
  extends:
    - '.fedora image'
    - '.fdo.suffixed-image@fedora'
    - '.test'
  needs:
    - "fedora amd64 docker"
  tags: ['gstreamer']

check fedora:
  extends: '.test fedora x86_64'
  variables:
    TEST_SUITE: "check.gst*"

integration testsuites fedora:
  extends: '.test fedora x86_64'
  parallel: 4
  variables:
    EXTRA_VALIDATE_ARGS: "--timeout-factor=2 --retry-on-failures --parts=${CI_NODE_TOTAL} --part-index=${CI_NODE_INDEX} --sync"
    TEST_SUITE: "validate ges"

# gstreamer-full:
gstreamer-full static build:
  extends: '.build fedora x86_64'
  stage: 'build'
  variables:
    MESON_ARGS: >
      --default-library=static
      -Ddoc=disabled
      $MESON_GST_WERROR

  script:
    - *build
    - meson test -C build -v test-gst-full
  artifacts:
    expire_in: "7 days"
    when: "always"
    paths:
      - 'meson-logs/'

gstreamer-full-minimal static build:
  extends: 'gstreamer-full static build'
  stage: 'build'
  variables:
    MESON_ARGS: >
      --default-library=static
      -Ddoc=disabled
      -Dgstreamer:gst_debug=false
      -Dauto_features=disabled
      -Dgstreamer:check=enabled
      -Dtests=enabled
      -Dgst-plugins-base:alsa=enabled
      -Dgst-plugins-base:typefind=enabled
      -Dgst-plugins-base:pbtypes=enabled
      -Dgst-full-elements=coreelements:filesrc,fakesink,identity,input-selector
      -Dgst-full-typefind-functions=typefindfunctions:wav,flv
      -Dgst-full-device-providers=alsa:alsadeviceprovider
      -Dgst-full-dynamic-types=pbtypes:video_multiview_flagset
      $MESON_GST_WERROR

  script:
    - *build
    - meson test -C build -v test-gst-full
    - meson test -C build test-gst-full-features --test-args "-e filesrc,identity,fakesink -E filesink,capsfilter -t audio/x-wav -T video/vivo -d alsadeviceprovider -D v4l2deviceprovider -l GstVideoMultiviewFlagsSet"
    - strip build/libgstreamer-full-1.0.so
    - ls -l build/libgstreamer-full-1.0.so
  artifacts:
    expire_in: "7 days"
    when: "always"
    paths:
      - 'meson-logs/'

# Valgrind
.valgrind fedora x86_64:
  extends: '.test fedora x86_64'
  stage: 'test'
  variables:
    EXTRA_VALIDATE_ARGS: "--valgrind"

valgrind core:
  extends: '.valgrind fedora x86_64'
  variables:
    TEST_SUITE: "check.gstreamer\\..*"
  rules:
    - changes:
      - "*"
      - scripts/*
      - ci/**/*
      - subprojects/gst-devtools/**/*
      - subprojects/gstreamer/**/*

valgrind base:
  extends: '.valgrind fedora x86_64'
  variables:
    TEST_SUITE: "check.gst-plugins-base\\..*"
  rules:
    - changes:
      - "*"
      - scripts/*
      - ci/**/*
      - subprojects/gst-devtools/**/*
      - subprojects/gstreamer/**/*
      - subprojects/gst-plugins-base/**/*

valgrind good:
  extends: '.valgrind fedora x86_64'
  variables:
    TEST_SUITE: "check.gst-plugins-good\\..*"
    # takes longer due to the splitmux unit test
    TIMEOUT_FACTOR: "4"
  rules:
    - changes:
      - "*"
      - scripts/*
      - ci/**/*
      - subprojects/gst-devtools/**/*
      - subprojects/gstreamer/**/*
      - subprojects/gst-plugins-base/**/*
      - subprojects/gst-plugins-good/**/*

valgrind ugly:
  extends: '.valgrind fedora x86_64'
  variables:
    TEST_SUITE: "check.gst-plugins-ugly\\..*"
  rules:
    - changes:
      - "*"
      - scripts/*
      - ci/**/*
      - subprojects/gst-devtools/**/*
      - subprojects/gstreamer/**/*
      - subprojects/gst-plugins-base/**/*
      - subprojects/gst-plugins-good/**/*
      - subprojects/gst-plugins-ugly/**/*

valgrind bad:
  extends: '.valgrind fedora x86_64'
  variables:
    TEST_SUITE: "check.gst-plugins-bad\\..*"
  rules:
    - changes:
      - "*"
      - scripts/*
      - ci/**/*
      - subprojects/gst-devtools/**/*
      - subprojects/gstreamer/**/*
      - subprojects/gst-plugins-base/**/*
      - subprojects/gst-plugins-good/**/*
      - subprojects/gst-plugins-bad/**/*

valgrind ges:
  extends: '.valgrind fedora x86_64'
  variables:
    TEST_SUITE: "check.gst-editing-services\\..*"
  rules:
    - changes:
      - "*"
      - scripts/*
      - ci/**/*
      - subprojects/gst-devtools/**/*
      - subprojects/gstreamer/**/*
      - subprojects/gst-plugins-base/**/*
      - subprojects/gst-plugins-good/**/*
      - subprojects/gst-plugins-bad/**/*
      - subprojects/gst-editing-services/**/*

# ---- Integration ----- #

.documentation:
  image: $FEDORA_DOCS_IMAGE
  extends:
    - '.build_ccache_vars'
  variables:
    MESON_ARGS: *simple_build
    MESON_BUILDTYPE_ARGS: "-Ddoc=enabled"
    CI_ARTIFACTS_URL: "${CI_PROJECT_URL}/-/jobs/${CI_JOB_ID}/artifacts/raw/"
  script:
    # FIXME: should rebuild the image with newer versions!
    - pip3 install --upgrade hotdoc
    - pip3 install --upgrade meson
    - *build
    - ./gst-env.py ninja -C build/ plugins_doc_caches
    # Ignore modifications to wrap files made by meson
    - git checkout subprojects/*.wrap
    - ./ci/scripts/check-documentation-diff.py
    - ./gst-env.py hotdoc run --conf-file=build/subprojects/gst-docs/GStreamer-doc.json --fatal-warnings
    - mv build/subprojects/gst-docs/GStreamer-doc/html documentation/

  artifacts:
    when: always
    expire_in: "7 days"
    paths:
      - documentation/
      - plugins-cache-diffs/

#
# This job runs in the gstreamer namespace after the merge into the main branch.
# The produced artifact is later used to automatically update the web page.
#
documentation:
  stage: integrate
  extends:
    - '.documentation'
  needs: []
  rules:
    - if: '$CI_PROJECT_NAMESPACE == "gstreamer" && $CI_COMMIT_BRANCH == "main"'

#
# This job is run in the user's namespace to validate documentation before
# merging an MR.
#
build documentation:
  extends:
    - '.documentation'
  stage: build
  needs:
    - "trigger"
  rules:
    # Never run post merge, we have the `documentation` always running for that
    - if: '$CI_PROJECT_NAMESPACE == "gstreamer" && $CI_COMMIT_BRANCH == $GST_UPSTREAM_BRANCH'
      when: never
    - changes:
      - .gitlab-ci.yml
      - ci/gitlab/*.py
      - subprojects/*.wrap
      - subprojects/gst-docs/**/*
      - subprojects/gst-devtools/**/*
      - subprojects/gst-editing-services/**/*
      - subprojects/gst-libav/**/*
      - subprojects/gst-omx/**/*
      - subprojects/gst-plugins-bad/**/*
      - subprojects/gst-plugins-base/**/*
      - subprojects/gst-plugins-good/**/*
      - subprojects/gst-plugins-ugly/**/*
      - subprojects/gstreamer/**/*
      - subprojects/gstreamer-vaapi/**/*
      - subprojects/gst-rtsp-server/**/*

# FIXME: Using trigger: causes permission issues, workaround using old REST API.
# https://gitlab.com/gitlab-org/gitlab/-/issues/341737
cerbero trigger:
  stage: build
  timeout: '3h'
  extends:
    - '.fedora image'
    - '.fdo.suffixed-image@fedora'
  needs:
    - "fedora amd64 docker"
  script:
    - ci/gitlab/trigger_cerbero_pipeline.py

  rules:
    # Never run post merge
    - if: '$CI_PROJECT_NAMESPACE == "gstreamer" && $CI_COMMIT_BRANCH == $GST_UPSTREAM_BRANCH'
      when: never
    - changes:
      - .gitlab-ci.yml
      - ci/gitlab/*.py
      - subprojects/gst-devtools/**/*
      - subprojects/gst-editing-services/**/*
      - subprojects/gst-libav/**/*
      - subprojects/gst-plugins-bad/**/*
      - subprojects/gst-plugins-base/**/*
      - subprojects/gst-plugins-good/**/*
      - subprojects/gst-plugins-ugly/**/*
      - subprojects/gst-python/**/*
      - subprojects/gstreamer/**/*
      - subprojects/gst-rtsp-server/**/*
      - subprojects/gst-examples/**/*
32  .gitlab/issue_templates/Bug.md  Normal file
@@ -0,0 +1,32 @@
### Describe your issue
<!-- a clear and concise summary of the bug. -->
<!-- For any GStreamer usage question, please contact the community using the #gstreamer channel on IRC https://www.oftc.net/ or the mailing list on https://gstreamer.freedesktop.org/lists/ -->

#### Expected Behavior
<!-- What did you expect to happen -->

#### Observed Behavior
<!-- What actually happened -->

#### Setup
- **Operating System:**
- **Device:** Computer / Tablet / Mobile / Virtual Machine <!-- Delete as appropriate !-->
- **GStreamer Version:**
- **Command line:**

### Steps to reproduce the bug
<!-- please fill in exact steps which reproduce the bug on your system, for example: -->
1. open terminal
2. type `command`

### How reproducible is the bug?
<!-- The reproducibility of the bug is Always/Intermittent/Only once after doing a very specific set of steps-->

### Screenshots if relevant

### Solutions you have tried

### Related non-duplicate issues

### Additional Information
<!-- Any other information such as logs. Make use of <details> for long output -->
3  .gitmodules  vendored  Normal file
@@ -0,0 +1,3 @@
[submodule "subprojects/gst-integration-testsuites/medias"]
    path = subprojects/gst-integration-testsuites/medias
    url = https://gitlab.freedesktop.org/gstreamer/gst-integration-testsuites.git
6  .indentignore  Normal file
@@ -0,0 +1,6 @@
subprojects/gst-plugins-bad/ext/sctp/usrsctp/usrsctplib/
subprojects/gstreamer-rs/
subprojects/gstreamer-rs-sys/
subprojects/gst-plugins-rs/
subprojects/gstreamer-sharp/
subprojects/gst-integration-testsuites/medias
481  LICENSE  Normal file
@@ -0,0 +1,481 @@
GNU LIBRARY GENERAL PUBLIC LICENSE
Version 2, June 1991

Copyright (C) 1991 Free Software Foundation, Inc.
51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

[This is the first released version of the library GPL. It is
numbered 2 because it goes with version 2 of the ordinary GPL.]

Preamble

The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
Licenses are intended to guarantee your freedom to share and change
free software--to make sure the software is free for all its users.

This license, the Library General Public License, applies to some
specially designated Free Software Foundation software, and to any
other libraries whose authors decide to use it. You can use it for
your libraries, too.

When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.

To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if
you distribute copies of the library, or if you modify it.

For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link a program with the library, you must provide
complete object files to the recipients so that they can relink them
with the library, after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.

Our method of protecting your rights has two steps: (1) copyright
the library, and (2) offer you this license which gives you legal
permission to copy, distribute and/or modify the library.

Also, for each distributor's protection, we want to make certain
that everyone understands that there is no warranty for this free
library. If the library is modified by someone else and passed on, we
want its recipients to know that what they have is not the original
version, so that any problems introduced by others will not reflect on
the original authors' reputations.

Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that companies distributing free
software will individually obtain patent licenses, thus in effect
transforming the program into proprietary software. To prevent this,
we have made it clear that any patent must be licensed for everyone's
free use or not licensed at all.

Most GNU software, including some libraries, is covered by the ordinary
GNU General Public License, which was designed for utility programs. This
license, the GNU Library General Public License, applies to certain
designated libraries. This license is quite different from the ordinary
one; be sure to read it in full, and don't assume that anything in it is
the same as in the ordinary license.

The reason we have a separate public license for some libraries is that
they blur the distinction we usually make between modifying or adding to a
program and simply using it. Linking a program with a library, without
changing the library, is in some sense simply using the library, and is
analogous to running a utility program or application program. However, in
a textual and legal sense, the linked executable is a combined work, a
derivative of the original library, and the ordinary General Public License
treats it as such.

Because of this blurred distinction, using the ordinary General
Public License for libraries did not effectively promote software
sharing, because most developers did not use the libraries. We
concluded that weaker conditions might promote sharing better.

However, unrestricted linking of non-free programs would deprive the
users of those programs of all benefit from the free status of the
libraries themselves. This Library General Public License is intended to
permit developers of non-free programs to use free libraries, while
preserving your freedom as a user of such programs to change the free
libraries that are incorporated in them. (We have not seen how to achieve
this as regards changes in header files, but we have achieved it as regards
changes in the actual functions of the Library.) The hope is that this
will lead to faster development of free libraries.

The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, while the latter only
works together with the library.

Note that it is possible for a library to be covered by the ordinary
General Public License rather than by this special one.

GNU LIBRARY GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. This License Agreement applies to any software library which
contains a notice placed by the copyright holder or other authorized
party saying it may be distributed under the terms of this Library
General Public License (also called "this License"). Each licensee is
addressed as "you".

A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.

The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)

"Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control compilation
and installation of the library.

Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does
and what the program that uses the Library does.

1. You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the
Library.

You may charge a fee for the physical act of transferring a copy,
and you may at your option offer warranty protection in exchange for a
fee.

2. You may modify your copy or copies of the Library or any portion
of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:

a) The modified work must itself be a software library.

b) You must cause the files modified to carry prominent notices
stating that you changed the files and the date of any change.

c) You must cause the whole of the work to be licensed at no
charge to all third parties under the terms of this License.

d) If a facility in the modified Library refers to a function or a
table of data to be supplied by an application program that uses
the facility, other than as an argument passed when the facility
is invoked, then you must make a good faith effort to ensure that,
in the event an application does not supply such function or
table, the facility still operates, and performs whatever part of
its purpose remains meaningful.

(For example, a function in a library to compute square roots has
a purpose that is entirely well-defined independent of the
application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must
be optional: if the application does not supply it, the square
root function must still compute square roots.)

These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote
it.

Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Library.

In addition, mere aggregation of another work not based on the Library
with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.

3. You may opt to apply the terms of the ordinary GNU General Public
License instead of this License to a given copy of the Library. To do
this, you must alter all the notices that refer to this License, so
that they refer to the ordinary GNU General Public License, version 2,
instead of to this License. (If a newer version than version 2 of the
ordinary GNU General Public License has appeared, then you can specify
that version instead if you wish.) Do not make any other change in
these notices.

Once this change is made in a given copy, it is irreversible for
that copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy.

This option is useful when you wish to copy part of the code of
the Library into a program that is not a library.

4. You may copy and distribute the Library (or a portion or
derivative of it, under Section 2) in object code or executable form
under the terms of Sections 1 and 2 above provided that you accompany
it with the complete corresponding machine-readable source code, which
must be distributed under the terms of Sections 1 and 2 above on a
medium customarily used for software interchange.

If distribution of object code is made by offering access to copy
from a designated place, then offering equivalent access to copy the
source code from the same place satisfies the requirement to
distribute the source code, even though third parties are not
compelled to copy the source along with the object code.

5. A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a
work, in isolation, is not a derivative work of the Library, and
therefore falls outside the scope of this License.

However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License.
Section 6 states terms for distribution of such executables.

When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not.
Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law.

If such an object file uses only numerical parameters, data
structure layouts and accessors, and small macros and small inline
functions (ten lines or less in length), then the use of the object
file is unrestricted, regardless of whether it is legally a derivative
work. (Executables containing this object code plus portions of the
Library will still fall under Section 6.)

Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6.
Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself.

6. As an exception to the Sections above, you may also compile or
link a "work that uses the Library" with the Library to produce a
work containing portions of the Library, and distribute that work
under terms of your choice, provided that the terms permit
modification of the work for the customer's own use and reverse
engineering for debugging such modifications.

You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one
of these things:

a) Accompany the work with the complete corresponding
machine-readable source code for the Library including whatever
changes were used in the work (which must be distributed under
Sections 1 and 2 above); and, if the work is an executable linked
with the Library, with the complete machine-readable "work that
uses the Library", as object code and/or source code, so that the
user can modify the Library and then relink to produce a modified
executable containing the modified Library. (It is understood
that the user who changes the contents of definitions files in the
Library will not necessarily be able to recompile the application
to use the modified definitions.)

b) Accompany the work with a written offer, valid for at
least three years, to give the same user the materials
specified in Subsection 6a, above, for a charge no more
than the cost of performing this distribution.

c) If distribution of the work is made by offering access to copy
from a designated place, offer equivalent access to copy the above
specified materials from the same place.

d) Verify that the user has already received a copy of these
materials or that you have already sent this user a copy.

For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception,
the source code distributed need not include anything that is normally
distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies
the executable.

It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you
distribute.

7. You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things:

a) Accompany the combined library with a copy of the same work
based on the Library, uncombined with any other library
facilities. This must be distributed under the terms of the
Sections above.

b) Give prominent notice with the combined library of the fact
that part of it is a work based on the Library, and explaining
where to find the accompanying uncombined form of the same work.

8. You may not copy, modify, sublicense, link with, or distribute
the Library except as expressly provided under this License. Any
attempt otherwise to copy, modify, sublicense, link with, or
distribute the Library is void, and will automatically terminate your
rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses
terminated so long as such parties remain in full compliance.

9. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Library or works based on it.

10. Each time you redistribute the Library (or any work based on the
Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.

11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Library at all. For example, if a patent
license would not permit royalty-free redistribution of the Library by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Library.

If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply,
and the section as a whole is intended to apply in other circumstances.

It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.

This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.

12. If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License may add
an explicit geographical distribution limitation excluding those countries,
so that distribution is permitted only in or among countries not thus
excluded. In such case, this License incorporates the limitation as if
written in the body of this License.

13. The Free Software Foundation may publish revised and/or new
versions of the Library General Public License from time to time.
Such new versions will be similar in spirit to the present version,
but may differ in detail to address new problems or concerns.

Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and
conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by
the Free Software Foundation.

14. If you wish to incorporate parts of the Library into other free
programs whose distribution conditions are incompatible with these,
write to the author to ask for permission. For software which is
copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing
and reuse of software generally.

NO WARRANTY

15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES.

END OF TERMS AND CONDITIONS

Appendix: How to Apply These Terms to Your New Libraries

If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms of the
ordinary General Public License).

To apply these terms, attach the following notices to the library. It is
safest to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.

<one line to give the library's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.

You should have received a copy of the GNU Library General Public
License along with this library; if not, write to the Free
Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.

Also add information on how to contact you by electronic and paper mail.

You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the library, if
necessary. Here is a sample; alter the names:

Yoyodyne, Inc., hereby disclaims all copyright interest in the
library `Frob' (a library for tweaking knobs) written by James Random Hacker.

<signature of Ty Coon>, 1 April 1990
Ty Coon, President of Vice

That's all there is to it!
462
README.md
462
README.md
@ -1,2 +1,462 @@
|
||||
# gstreamer
|
||||
# GStreamer
|
||||
|
||||
This is GStreamer, a framework for streaming media.
|
||||
|
||||
## Where to start
|
||||
|
||||
We have a website at
|
||||
|
||||
https://gstreamer.freedesktop.org
|
||||
|
||||
Our documentation, including tutorials, API reference and FAQ can be found at
|
||||
|
||||
https://gstreamer.freedesktop.org/documentation/
|
||||
|
||||
You can subscribe to our mailing lists:
|
||||
|
||||
https://lists.freedesktop.org/mailman/listinfo/gstreamer-announce
|
||||
|
||||
https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel
|
||||
|
||||
We track bugs, feature requests and merge requests (patches) in GitLab at
|
||||
|
||||
https://gitlab.freedesktop.org/gstreamer/
|
||||
|
||||
You can join us on IRC - #gstreamer on irc.oftc.net
|
||||
|
||||
This repository contains all official modules supported by the GStreamer
|
||||
community which can be found in the `subprojects/` directory.
|
||||
|
||||
## Getting started
|
||||
|
||||
### Install git and python 3.5+
|
||||
|
||||
If you're on Linux, you probably already have these. On macOS, you can use the
|
||||
[official Python installer](https://www.python.org/downloads/mac-osx/).
|
||||
|
||||
You can find [instructions for Windows below](#windows-prerequisites-setup).
|
||||
|
||||
### Install meson and ninja
|
||||
|
||||
Meson 0.59 or newer is required.
|
||||
|
||||
On Linux and macOS you can get meson through your package manager or using:
|
||||
|
||||
$ pip3 install --user meson
|
||||
|
||||
This will install meson into `~/.local/bin` which may or may not be included
|
||||
automatically in your PATH by default.
|
||||
|
||||
You should get `ninja` using your package manager or download the [official
|
||||
release](https://github.com/ninja-build/ninja/releases) and put the `ninja`
|
||||
binary in your PATH.
|
||||
|
||||
You can find [instructions for Windows below](#windows-prerequisites-setup).
|
||||
|
||||
### Build GStreamer and its modules
|
||||
|
||||
You can build GStreamer and all of its modules by running:
|
||||
|
||||
```
|
||||
meson builddir
|
||||
ninja -C builddir
|
||||
```
|
||||
|
||||
This will automatically create the `builddir` directory and build everything
|
||||
inside it.
|
||||
|
||||
NOTE: On Windows, you *must* run this from [inside the Visual Studio command
|
||||
prompt](#running-meson-on-windows) of the appropriate architecture and version.
|
||||
|
||||
### External dependencies
|
||||
|
||||
All mandatory dependencies of GStreamer are included as [meson subprojects](https://mesonbuild.com/Subprojects.html):
|
||||
libintl, zlib, libffi, glib. Some optional dependencies are also included as
|
||||
subprojects, such as ffmpeg, x264, json-glib, graphene, openh264, orc, etc.
|
||||
|
||||
Mandatory dependencies will be automatically built if meson cannot find them on
|
||||
your system using pkg-config. The same is true for optional dependencies that
|
||||
are included as subprojects. You can find a full list by looking at the
|
||||
`subprojects` directory.
|
||||
|
||||
Plugins that need optional dependencies that aren't included can only be built
|
||||
if they are provided by the system. Instructions on how to build some common
|
||||
ones such as Qt5/QML are listed below. If you do not know how to provide an
|
||||
optional dependency needed by a plugin, you should use [Cerbero](https://gitlab.freedesktop.org/gstreamer/cerbero/#description)
|
||||
which handles this for you automatically.
|
||||
|
||||
Plugins will be automatically enabled if possible, but you can ensure that
|
||||
a particular plugin (especially if it has external dependencies) is built by
|
||||
enabling the gstreamer repository that ships it and the plugin inside it. For
|
||||
example, to enable the Qt5 plugin in the gst-plugins-good repository, you need
|
||||
to run meson as follows:
|
||||
|
||||
```
|
||||
meson -Dgood=enabled -Dgst-plugins-good:qt5=enabled builddir
|
||||
```
|
||||
|
||||
This will cause Meson to error out if the plugin could not be enabled. You can
|
||||
also flip the default and disable all plugins except those explicitly enabled
|
||||
like so:
|
||||
|
||||
```
|
||||
meson -Dauto_features=disabled -Dgstreamer:tools=enabled -Dbad=enabled -Dgst-plugins-bad:openh264=enabled builddir
|
||||
```
|
||||
|
||||
This will disable all optional features and then enable the `openh264` plugin
|
||||
and the tools that ship with the core gstreamer repository: `gst-inspect-1.0`,
|
||||
`gst-launch-1.0`, etc. As usual, you can change these values on a builddir that
|
||||
has already been set up with `meson configure -Doption=value`.
|
||||
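
For example, to later enable the Qt5 plugin in an already-configured build directory (a minimal sketch; substitute whichever options you actually need):

```
meson configure builddir -Dgood=enabled -Dgst-plugins-good:qt5=enabled
```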
|
||||
### Building the Qt5 QML plugin
|
||||
|
||||
If `qmake` is not in `PATH` and pkgconfig files are not available, you can
|
||||
point the `QMAKE` env var to the Qt5 installation of your choosing before
|
||||
running `meson` as shown above.
|
||||
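
For example, assuming a hypothetical Qt installation path (adjust it to wherever your Qt5 toolchain actually lives):

```
QMAKE=/opt/Qt/5.15.2/gcc_64/bin/qmake meson -Dgood=enabled -Dgst-plugins-good:qt5=enabled builddir
```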
|
||||
The plugin will be automatically enabled if possible, but you can ensure that
|
||||
it is built by passing `-Dgood=enabled -Dgst-plugins-good:qt5=enabled` to `meson`.
|
||||
|
||||
### Building the Intel MSDK plugin
|
||||
|
||||
On Linux, you need to have development files for `libmfx` installed. On
|
||||
Windows, if you have the [Intel Media SDK](https://software.intel.com/en-us/media-sdk),
|
||||
it will set the `INTELMEDIASDKROOT` environment variable, which will be used by
|
||||
the build files to find `libmfx`.
|
||||
|
||||
The plugin will be automatically enabled if possible, but you can ensure it by
|
||||
passing `-Dbad=enabled -Dgst-plugins-bad:msdk=enabled` to `meson`.
|
||||
|
||||
### Building plugins with (A)GPL-licensed dependencies
|
||||
|
||||
Some plugins have GPL- or AGPL-licensed dependencies and will only be built
|
||||
if you have explicitly opted in to allow (A)GPL-licensed dependencies by
|
||||
passing `-Dgpl=enabled` to Meson.
|
||||
|
||||
List of plugins with (A)GPL-licensed dependencies (non-exhaustive) in gst-plugins-bad:
|
||||
- dts (DTS audio decoder plugin)
|
||||
- faad (Free AAC audio decoder plugin)
|
||||
- iqa (Image quality assessment plugin based on dssim-c)
|
||||
- mpeg2enc (MPEG-2 video encoder plugin)
|
||||
- mplex (audio/video multiplexer plugin)
|
||||
- ofa (Open Fingerprint Architecture library plugin)
|
||||
- resindvd (Resin DVD playback plugin)
|
||||
- x265 (HEVC/H.265 video encoder plugin)
|
||||
|
||||
List of plugins with (A)GPL-licensed dependencies (non-exhaustive) in gst-plugins-ugly:
|
||||
- a52dec (Dolby Digital (AC-3) audio decoder plugin)
|
||||
- cdio (CD audio source plugin based on libcdio)
|
||||
- dvdread (DVD video source plugin based on libdvdread)
|
||||
- mpeg2dec (MPEG-2 video decoder plugin based on libmpeg2)
|
||||
- sidplay (Commodore 64 audio decoder plugin based on libsidplay)
|
||||
- x264 (H.264 video encoder plugin based on libx264)
|
||||
|
||||
### Static build
|
||||
|
||||
Since *1.18.0* when doing a static build using `--default-library=static`, a
|
||||
shared library `gstreamer-full-1.0` will be produced and includes all enabled
|
||||
GStreamer plugins and libraries. The list of libraries that need to be exposed in the
|
||||
`gstreamer-full-1.0` ABI can be set using the `gst-full-libraries` option. glib-2.0,
|
||||
gobject-2.0 and gstreamer-1.0 are always included.
|
||||
|
||||
```
|
||||
meson --default-library=static -Dgst-full-libraries=app,video builddir
|
||||
```
|
||||
|
||||
GStreamer *1.18* requires applications using gstreamer-full-1.0 to initialize
|
||||
static plugins by calling `gst_init_static_plugins()` after `gst_init()`. That
|
||||
function is defined in the `gst/gstinitstaticplugins.h` header file.
|
||||
|
||||
Since *1.20.0* `gst_init_static_plugins()` is called automatically by
|
||||
`gst_init()` and applications must not call it manually any more. The header
|
||||
file has been removed from the public API.
|
||||
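
For GStreamer *1.18*/*1.19* only, a minimal application sketch would look like the following (with *1.20* and newer the explicit call and the header must be dropped):

```c
#include <gst/gst.h>
#include <gst/gstinitstaticplugins.h>

int
main (int argc, char *argv[])
{
  gst_init (&argc, &argv);
  /* Required with gstreamer-full 1.18/1.19 only */
  gst_init_static_plugins ();

  /* ... use GStreamer as usual ... */

  return 0;
}
```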
|
||||
One can use the `gst-full-version-script` option to pass a
|
||||
[version script](https://www.gnu.org/software/gnulib/manual/html_node/LD-Version-Scripts.html)
|
||||
to the linker. This can be used to control the exact symbols that are exported by
|
||||
the gstreamer-full library, allowing the linker to garbage collect unused code
|
||||
and so reduce the total library size. A default script `gstreamer-full-default.map`
|
||||
declares only glib/gstreamer symbols as public.
|
||||
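
As an illustration only (the `gstreamer-full-default.map` shipped with the sources is the authoritative reference), such a version script could look like this:

```
/* The version node name below is arbitrary */
GSTREAMER_FULL_1_0 {
  global:
    gst_*;
    g_*;
  local:
    *;
};
```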
|
||||
One can use the `gst-full-plugins` option to pass a list of plugins to be registered
|
||||
in the gstreamer-full library. The default value is '*' which means that all the plugins selected
|
||||
during the build process will be registered statically. An empty value will prevent any plugins from
|
||||
being registered.
|
||||
|
||||
One can select a specific set of features with `gst-full-elements`, `gst-full-typefind-functions`, `gst-full-device-providers` or `gst-full-dynamic-types` to select specific features from a plugin.
|
||||
When a feature has been listed in one of those options, the other features from its plugin will no longer be automatically included, even if the plugin is listed in `gst-full-plugins`.
|
||||
|
||||
The user must ensure that all selected plugins and features (element, typefind, etc.) have been
|
||||
enabled during the build configuration.
|
||||
|
||||
To register features, the syntax is the following:
|
||||
plugins are separated by ';', and the features from a plugin start after ':' and are separated by ','.
|
||||
|
||||
As an example:
|
||||
* `-Dgst-full-plugins=coreelements;playback;typefindfunctions;alsa;pbtypes`: enable only `coreelements`, `playback`, `typefindfunctions`, `alsa`, `pbtypes` plugins.
|
||||
* `-Dgst-full-elements=coreelements:filesrc,fakesink,identity;alsa:alsasrc`: enable only `filesrc`, `identity` and `fakesink` elements from `coreelements` and `alsasrc` element from `alsa` plugin.
|
||||
* `-Dgst-full-typefind-functions=typefindfunctions:wav,flv`: enable only the typefind functions `wav` and `flv` from `typefindfunctions`.
|
||||
* `-Dgst-full-device-providers=alsa:alsadeviceprovider`: enable `alsadeviceprovider` from `alsa`.
|
||||
* `-Dgst-full-dynamic-types=pbtypes:video_multiview_flagset`: enable `video_multiview_flagset` from `pbtypes`.
|
||||
|
||||
In this example, all features from the `playback` plugin will be enabled, while the other plugins will be restricted to the specific features requested.
|
||||
|
||||
All the selected features will be registered under a dedicated `NULL` plugin name.
|
||||
|
||||
Features and plugins that are not registered this way will not be included in the final gstreamer-full library.
|
||||
|
||||
This is an experimental feature; backward-incompatible changes could still be
|
||||
made in the future.
|
||||
|
||||
# Development environment
|
||||
|
||||
## Development environment target
|
||||
|
||||
GStreamer also contains a special `devenv` target that lets you enter a
|
||||
development environment where you will be able to work on GStreamer
|
||||
easily. You can get into that environment by running:
|
||||
|
||||
```
|
||||
ninja -C builddir devenv
|
||||
```
|
||||
|
||||
If your operating system handles symlinks, the source code of the built modules will be
|
||||
available at the root of the checkout; for example, GStreamer core will be in
|
||||
`gstreamer/`. Otherwise they will be present in `subprojects/`. You can simply
|
||||
hack in there, and to rebuild you just need to rerun `ninja -C builddir`.
|
||||
|
||||
NOTE: In the development environment, a fully usable prefix is also configured
|
||||
in `gstreamer/prefix` where you can install any extra dependency/project.
|
||||
|
||||
An external script can be run in the development environment with:
|
||||
|
||||
```
|
||||
./gst-env.py external_script.sh
|
||||
```
|
||||
|
||||
For more extensive documentation about the development environment go to [the
|
||||
documentation](https://gstreamer.freedesktop.org/documentation/installing/building-from-source-using-meson.html).
|
||||
|
||||
## Custom subprojects
|
||||
|
||||
We also added a meson option, `custom_subprojects`, that allows the user
|
||||
to provide a comma-separated list of subprojects that should be built
|
||||
alongside the default ones.
|
||||
|
||||
To use it:
|
||||
|
||||
```
|
||||
cd subprojects
|
||||
git clone my_subproject
|
||||
cd ../build
|
||||
rm -rf * && meson .. -Dcustom_subprojects=my_subproject
|
||||
ninja
|
||||
```
|
||||
|
||||
## Run tests
|
||||
|
||||
You can easily run the tests of all the components:
|
||||
|
||||
```
|
||||
meson test -C builddir
|
||||
```
|
||||
|
||||
To list all available tests:
|
||||
|
||||
```
|
||||
meson test -C builddir --list
|
||||
```
|
||||
|
||||
To run all the tests of a specific component:
|
||||
|
||||
```
|
||||
meson test -C builddir --suite gst-plugins-base
|
||||
```
|
||||
|
||||
Or to run a specific test file:
|
||||
|
||||
```
|
||||
meson test -C builddir --suite gstreamer gst_gstbuffer
|
||||
```
|
||||
|
||||
Run a specific test from a specific test file:
|
||||
|
||||
```
|
||||
GST_CHECKS=test_subbuffer meson test -C builddir --suite gstreamer gst_gstbuffer
|
||||
```
|
||||
|
||||
## Optional Installation
|
||||
|
||||
You can also install everything that is built into a predetermined prefix like
|
||||
so:
|
||||
|
||||
```
|
||||
meson --prefix=/path/to/install/prefix builddir
|
||||
ninja -C builddir
|
||||
meson install -C builddir
|
||||
```
|
||||
|
||||
Note that the installed files have `RPATH` stripped, so you will need to set
|
||||
`LD_LIBRARY_PATH`, `DYLD_LIBRARY_PATH`, or `PATH` as appropriate for your
|
||||
platform for things to work.
|
||||
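
For example, on Linux (using the same hypothetical prefix as above; adjust `lib` to your platform's libdir, e.g. `lib64`):

```
export LD_LIBRARY_PATH=/path/to/install/prefix/lib:$LD_LIBRARY_PATH
```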
|
||||
|
||||
## Add information about the GStreamer development environment to your prompt line
|
||||
|
||||
### Bash prompt
|
||||
|
||||
We automatically handle `bash` and set `$PS1` accordingly.
|
||||
|
||||
If the automatic `$PS1` override is not desired (maybe you have a fancy custom
|
||||
prompt), set the `$GST_BUILD_DISABLE_PS1_OVERRIDE` environment variable to
|
||||
`TRUE` and use `$GST_ENV` when setting the custom prompt, for example with a
|
||||
snippet like the following:
|
||||
|
||||
```bash
|
||||
...
|
||||
if [[ -n "${GST_ENV-}" ]];
|
||||
then
|
||||
PS1+="[ ${GST_ENV} ]"
|
||||
fi
|
||||
...
|
||||
```
|
||||
|
||||
### Using powerline
|
||||
|
||||
In your powerline theme configuration file (by default in
|
||||
`{POWERLINE INSTALLATION DIR}/config_files/themes/shell/default.json`)
|
||||
you should add a new environment segment as follows:
|
||||
|
||||
```
|
||||
{
|
||||
"function": "powerline.segments.common.env.environment",
|
||||
"args": { "variable": "GST_ENV" },
|
||||
"priority": 50
|
||||
},
|
||||
```
|
||||
|
||||
## Windows Prerequisites Setup
|
||||
|
||||
On Windows, some of the components may require special care.
|
||||
|
||||
### Git for Windows
|
||||
|
||||
Use the [Git for Windows](https://gitforwindows.org/) installer. It will
|
||||
install a `bash` prompt with basic shell utils and up-to-date git binaries.
|
||||
|
||||
During installation, when prompted about `PATH`, you should select the
|
||||
following option:
|
||||
|
||||

|
||||
|
||||
### Python 3.5+ on Windows
|
||||
|
||||
Use the [official Python installer](https://www.python.org/downloads/windows/).
|
||||
You must ensure that Python is installed into `PATH`:
|
||||
|
||||

|
||||
|
||||
You may also want to customize the installation and install it into
|
||||
a system-wide location such as `C:\PythonXY`, but this is not required.
|
||||
|
||||
### Ninja on Windows
|
||||
|
||||
The easiest way to install Ninja on Windows is with `pip3`, which will download
|
||||
the compiled binary and place it into the `Scripts` directory inside your
|
||||
Python installation:
|
||||
|
||||
```
|
||||
pip3 install ninja
|
||||
```
|
||||
|
||||
You can also download the [official release](https://github.com/ninja-build/ninja/releases)
|
||||
and place it into `PATH`.
|
||||
|
||||
### Meson on Windows
|
||||
|
||||
**IMPORTANT**: Do not use the Meson MSI installer since it is experimental and known to not
|
||||
work with `GStreamer`.
|
||||
|
||||
You can use `pip3` to install Meson, same as Ninja above:
|
||||
|
||||
```
|
||||
pip3 install meson
|
||||
```
|
||||
|
||||
Note that Meson is written entirely in Python, so you can also run it as-is
|
||||
from the [git repository](https://github.com/mesonbuild/meson/) if you want to
|
||||
use the latest master branch for some reason.
|
||||
|
||||
### Running Meson on Windows
|
||||
|
||||
At present, to build with Visual Studio, you need to run Meson from inside the
|
||||
VS 2019 command prompt. Press `Start`, search for `VS 2019`, and click on
|
||||
`x64 Native Tools Command Prompt for VS 2019`, or a prompt with a name similar to
|
||||
that:
|
||||
|
||||

|
||||
|
||||
**ARM64 native only**: Since Visual Studio might not install a dedicated command
|
||||
prompt for native ARM64 builds, you might need to run `vcvarsx86_arm64.bat` from CMD.
|
||||
Please refer to [this document](https://docs.microsoft.com/en-us/cpp/build/building-on-the-command-line?view=vs-2019#developer_command_file_locations).
|
||||
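
For example (assuming the default Build Tools install location; the path differs per Visual Studio edition):

```
"C:\Program Files (x86)\Microsoft Visual Studio\2019\BuildTools\VC\Auxiliary\Build\vcvarsx86_arm64.bat"
meson builddir
ninja -C builddir
```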
|
||||
### Setup a mingw/wine based development environment on linux
|
||||
|
||||
#### Install wine and mingw
|
||||
|
||||
##### On fedora x64
|
||||
|
||||
``` sh
|
||||
sudo dnf install mingw64-gcc mingw64-gcc-c++ mingw64-pkg-config mingw64-winpthreads wine
|
||||
```
|
||||
|
||||
FIXME: Figure out what needs to be installed on other distros
|
||||
|
||||
#### Get meson from git
|
||||
|
||||
This simplifies the process and allows us to use the cross files
|
||||
defined in meson itself.
|
||||
|
||||
``` sh
|
||||
git clone https://github.com/mesonbuild/meson.git
|
||||
```
|
||||
|
||||
#### Build and install
|
||||
|
||||
```
|
||||
BUILDDIR=$PWD/winebuild/
|
||||
export WINEPREFIX=$BUILDDIR/wine-prefix/ && mkdir -p $WINEPREFIX
|
||||
# Setting the prefix is mandatory as it is used to set up symlinks within the development environment
|
||||
meson/meson.py $BUILDDIR --cross-file meson/cross/linux-mingw-w64-64bit.txt -Dgst-plugins-bad:vulkan=disabled -Dorc:gtk_doc=disabled --prefix=$BUILDDIR/wininstall/ -Djson-glib:gtk_doc=disabled
|
||||
meson/meson.py install -C $BUILDDIR/
|
||||
```
|
||||
|
||||
> __NOTE__: You should use `meson install -C $BUILDDIR` each time you make a change
|
||||
> instead of the usual `ninja -C build`, as this setup does not use the development environment.
|
||||
|
||||
#### The development environment
|
||||
|
||||
You can get into the development environment the usual way:
|
||||
|
||||
```
|
||||
ninja -C $BUILDDIR/ devenv
|
||||
```
|
||||
|
||||
Alternatively, if you'd rather not start a shell in your workflow, you
|
||||
can mutate the current environment into a suitable state like so:
|
||||
|
||||
```
|
||||
gst-env.py --only-environment
|
||||
```
|
||||
|
||||
This will print output suitable for an sh-compatible `eval` function,
|
||||
just like `ssh-agent -s`.
|
||||
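
For example, from an sh-compatible shell:

```
eval $(./gst-env.py --only-environment)
```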
|
||||
After setting up [binfmt] to use Wine for Windows binaries,
|
||||
you can run GStreamer tools under wine by running:
|
||||
|
||||
```
|
||||
gst-launch-1.0.exe videotestsrc ! glimagesink
|
||||
```
|
||||
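
As a sketch, the [binfmt] registration mentioned above could look like the following (file name and Wine path are illustrative and vary per distribution; many distributions already ship an equivalent entry):

```
# /etc/binfmt.d/wine.conf (hypothetical file name)
:DOSWin:M::MZ::/usr/bin/wine:
```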
|
||||
[binfmt]: http://man7.org/linux/man-pages/man5/binfmt.d.5.html
|
||||
|
9
ci/README.txt
Normal file
9
ci/README.txt
Normal file
@ -0,0 +1,9 @@
|
||||
GStreamer Continuous Integration
|
||||
================================
|
||||
|
||||
This repository contains all material relevant to the GStreamer
|
||||
Continuous Integration system.
|
||||
|
||||
* Docker images
|
||||
|
||||
* Build scripts and code
|
2
ci/docker/README
Normal file
2
ci/docker/README
Normal file
@ -0,0 +1,2 @@
|
||||
GStreamer Docker images
|
||||
|
259
ci/docker/fedora/prepare.sh
Normal file
259
ci/docker/fedora/prepare.sh
Normal file
@ -0,0 +1,259 @@
|
||||
set -eux
|
||||
|
||||
# The Fedora base image disables installing documentation files. See https://pagure.io/atomic-wg/issue/308
|
||||
# We need them to cleanly build our doc.
|
||||
sed -i "s/tsflags=nodocs//g" /etc/dnf/dnf.conf
|
||||
|
||||
dnf install -y git-core ninja-build dnf-plugins-core python3-pip
|
||||
|
||||
# Configure git for various usage
|
||||
git config --global user.email "gstreamer@gstreamer.net"
|
||||
git config --global user.name "Gstbuild Runner"
|
||||
|
||||
# Add rpm fusion repositories in order to access all of the gst plugins
|
||||
sudo dnf install -y \
|
||||
"https://mirrors.rpmfusion.org/free/fedora/rpmfusion-free-release-$(rpm -E %fedora).noarch.rpm" \
|
||||
"https://mirrors.rpmfusion.org/nonfree/fedora/rpmfusion-nonfree-release-$(rpm -E %fedora).noarch.rpm"
|
||||
|
||||
dnf upgrade -y
|
||||
|
||||
# install rest of the extra deps
|
||||
dnf install -y \
|
||||
aalib-devel \
|
||||
aom \
|
||||
bat \
|
||||
intel-mediasdk-devel \
|
||||
libaom \
|
||||
libaom-devel \
|
||||
libcaca-devel \
|
||||
libdav1d \
|
||||
libdav1d-devel \
|
||||
ccache \
|
||||
cmake \
|
||||
clang-devel \
|
||||
elfutils \
|
||||
elfutils-libs \
|
||||
elfutils-devel \
|
||||
gcc \
|
||||
gcc-c++ \
|
||||
gdb \
|
||||
git-lfs \
|
||||
glslc \
|
||||
gtk3 \
|
||||
gtk3-devel \
|
||||
gtest \
|
||||
gtest-devel \
|
||||
graphene \
|
||||
graphene-devel \
|
||||
gsl \
|
||||
gsl-devel \
|
||||
gupnp \
|
||||
gupnp-devel \
|
||||
gupnp-igd \
|
||||
gupnp-igd-devel \
|
||||
gssdp \
|
||||
gssdp-devel \
|
||||
faac-devel \
|
||||
ffmpeg \
|
||||
ffmpeg-libs \
|
||||
ffmpeg-devel \
|
||||
flex \
|
||||
flite \
|
||||
flite-devel \
|
||||
mono-devel \
|
||||
procps-ng \
|
||||
patch \
|
||||
qt5-devel \
|
||||
redhat-rpm-config \
|
||||
json-glib \
|
||||
json-glib-devel \
|
||||
libnice \
|
||||
libnice-devel \
|
||||
libsodium-devel \
|
||||
libunwind \
|
||||
libunwind-devel \
|
||||
libyaml-devel \
|
||||
libxml2-devel \
|
||||
libxslt-devel \
|
||||
llvm-devel \
|
||||
log4c-devel \
|
||||
make \
|
||||
nasm \
|
||||
neon \
|
||||
neon-devel \
|
||||
nunit \
|
||||
npm \
|
||||
opencv \
|
||||
opencv-devel \
|
||||
openjpeg2 \
|
||||
openjpeg2-devel \
|
||||
SDL2 \
|
||||
SDL2-devel \
|
||||
sbc \
|
||||
sbc-devel \
|
||||
x264 \
|
||||
x264-libs \
|
||||
x264-devel \
|
||||
python3 \
|
||||
python3-devel \
|
||||
python3-libs \
|
||||
python3-gobject \
|
||||
python3-cairo \
|
||||
python3-cairo-devel \
|
||||
valgrind \
|
||||
vulkan \
|
||||
vulkan-devel \
|
||||
mesa-omx-drivers \
|
||||
mesa-libGL \
|
||||
mesa-libGL-devel \
|
||||
mesa-libGLU \
|
||||
mesa-libGLU-devel \
|
||||
mesa-libGLES \
|
||||
mesa-libGLES-devel \
|
||||
mesa-libOpenCL \
|
||||
mesa-libOpenCL-devel \
|
||||
mesa-libgbm \
|
||||
mesa-libgbm-devel \
|
||||
mesa-libd3d \
|
||||
mesa-libd3d-devel \
|
||||
mesa-libOSMesa \
|
||||
mesa-libOSMesa-devel \
|
||||
mesa-vulkan-drivers \
|
||||
wpewebkit \
|
||||
wpewebkit-devel \
|
||||
xorg-x11-server-utils \
|
||||
xorg-x11-server-Xvfb
|
||||
|
||||
# Install common debug symbols
|
||||
dnf debuginfo-install -y gtk3 \
|
||||
glib2 \
|
||||
glibc \
|
||||
gupnp \
|
||||
gupnp-igd \
|
||||
gssdp \
|
||||
freetype \
|
||||
openjpeg \
|
||||
gobject-introspection \
|
||||
python3 \
|
||||
python3-libs \
|
||||
python3-gobject \
|
||||
libappstream-glib-devel \
|
||||
libjpeg-turbo \
|
||||
glib-networking \
|
||||
libcurl \
|
||||
libsoup \
|
||||
nss \
|
||||
nss-softokn \
|
||||
nss-softokn-freebl \
|
||||
nss-sysinit \
|
||||
nss-util \
|
||||
openssl \
|
||||
openssl-libs \
|
||||
openssl-pkcs11 \
|
||||
brotli \
|
||||
bzip2-libs \
|
||||
gpm-libs \
|
||||
harfbuzz \
|
||||
harfbuzz-icu \
|
||||
json-c \
|
||||
json-glib \
|
||||
libbabeltrace \
|
||||
libffi \
|
||||
libsrtp \
|
||||
libunwind \
|
||||
mpg123-libs \
|
||||
neon \
|
||||
orc-compiler \
|
||||
orc \
|
||||
pixman \
|
||||
pulseaudio-libs \
|
||||
pulseaudio-libs-glib2 \
|
||||
wavpack \
|
||||
webrtc-audio-processing \
|
||||
ffmpeg \
|
||||
ffmpeg-libs \
|
||||
faad2-libs \
|
||||
libavdevice \
|
||||
libmpeg2 \
|
||||
faac \
|
||||
fdk-aac \
|
||||
x264 \
|
||||
x264-libs \
|
||||
x265 \
|
||||
x265-libs \
|
||||
xz \
|
||||
xz-libs \
|
||||
zip \
|
||||
zlib
|
||||
|
||||
# Install the dependencies of gstreamer
|
||||
dnf builddep -y gstreamer1 \
|
||||
gstreamer1-plugins-base \
|
||||
gstreamer1-plugins-good \
|
||||
gstreamer1-plugins-good-extras \
|
||||
gstreamer1-plugins-ugly \
|
||||
gstreamer1-plugins-ugly-free \
|
||||
gstreamer1-plugins-bad-free \
|
||||
gstreamer1-plugins-bad-free-extras \
|
||||
gstreamer1-plugins-bad-freeworld \
|
||||
gstreamer1-libav \
|
||||
gstreamer1-rtsp-server \
|
||||
gstreamer1-vaapi \
|
||||
python3-gstreamer1
|
||||
|
||||
dnf remove -y meson
|
||||
pip3 install meson==0.59.1 hotdoc python-gitlab
|
||||
|
||||
|
||||
# Remove gst-devel packages installed by builddep above
|
||||
dnf remove -y "gstreamer1*devel"
|
||||
|
||||
# FIXME: Why doesn't installing directly with dnf actually install
|
||||
# the documentation files?
|
||||
dnf download glib2-doc gdk-pixbuf2-devel*x86_64* gtk3-devel-docs
|
||||
rpm -i --reinstall *.rpm
|
||||
rm -f *.rpm
|
||||
|
||||
# Install Rust
|
||||
RUSTUP_VERSION=1.24.3
|
||||
RUST_VERSION=1.55.0
|
||||
RUST_ARCH="x86_64-unknown-linux-gnu"
|
||||
|
||||
dnf install -y wget
|
||||
RUSTUP_URL=https://static.rust-lang.org/rustup/archive/$RUSTUP_VERSION/$RUST_ARCH/rustup-init
|
||||
wget $RUSTUP_URL
|
||||
dnf remove -y wget
|
||||
|
||||
export RUSTUP_HOME="/usr/local/rustup"
|
||||
export CARGO_HOME="/usr/local/cargo"
|
||||
export PATH="/usr/local/cargo/bin:$PATH"
|
||||
|
||||
chmod +x rustup-init;
|
||||
./rustup-init -y --no-modify-path --profile minimal --default-toolchain $RUST_VERSION;
|
||||
rm rustup-init;
|
||||
chmod -R a+w $RUSTUP_HOME $CARGO_HOME
|
||||
|
||||
rustup --version
|
||||
cargo --version
|
||||
rustc --version
|
||||
|
||||
# get gstreamer and make all subprojects available
|
||||
git clone -b ${GIT_BRANCH} ${GIT_URL} /gstreamer
|
||||
git -C /gstreamer submodule update --init --depth=1
|
||||
meson subprojects download --sourcedir /gstreamer
|
||||
/gstreamer/ci/scripts/handle-subprojects-cache.py --build /gstreamer/subprojects/
|
||||
|
||||
# Run git gc to prune unwanted refs and reduce the size of the image
|
||||
for i in $(find /subprojects/ -mindepth 1 -maxdepth 1 -type d);
|
||||
do
|
||||
git -C $i gc --aggressive || true;
|
||||
done
|
||||
|
||||
# Now remove the gstreamer clone
|
||||
rm -rf /gstreamer
|
||||
|
||||
echo "Removing DNF cache"
|
||||
dnf clean all
|
||||
|
||||
rm -R /root/*
|
||||
rm -rf /var/cache/dnf /var/log/dnf*
|
1
ci/docker/windows/.dockerignore
Normal file
1
ci/docker/windows/.dockerignore
Normal file
@ -0,0 +1 @@
|
||||
build_image.ps1
|
60
ci/docker/windows/Dockerfile
Normal file
60
ci/docker/windows/Dockerfile
Normal file
@ -0,0 +1,60 @@
|
||||
# escape=`
|
||||
|
||||
FROM 'mcr.microsoft.com/windows/servercore:ltsc2019'
|
||||
|
||||
# Make sure any failure in PowerShell scripts is fatal
|
||||
SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop';"]
|
||||
ENV ErrorActionPreference='Stop'
|
||||
|
||||
# Install Chocolatey
|
||||
RUN iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))
|
||||
# Install required packages
|
||||
RUN choco install -y vcredist140
|
||||
RUN choco install -y cmake --installargs 'ADD_CMAKE_TO_PATH=System'
|
||||
RUN choco install -y git --params '/NoAutoCrlf /NoCredentialManager /NoShellHereIntegration /NoGuiHereIntegration /NoShellIntegration'
|
||||
RUN choco install -y git-lfs
|
||||
RUN choco install -y 7zip
|
||||
RUN choco install -y python3
|
||||
RUN choco install -y msys2 --params '/NoPath /NoUpdate /InstallDir:C:\\msys64'
|
||||
|
||||
RUN c:\msys64\usr\bin\bash -lc 'pacman -S --noconfirm mingw-w64-ucrt-x86_64-toolchain ninja'
|
||||
# Visual Studio can't be installed with choco.
|
||||
# It depends on dotnetfx v4.8.0.20190930, which requires a reboot: dotnetfx (exit code 3010)
|
||||
# https://github.com/microsoft/vs-dockerfiles/blob/main/native-desktop/
|
||||
# Set up environment to collect install errors.
|
||||
COPY Install.cmd C:\TEMP\
|
||||
ADD https://aka.ms/vscollect.exe C:\TEMP\collect.exe
|
||||
# Download channel for fixed install.
|
||||
ARG CHANNEL_URL=https://aka.ms/vs/16/release/channel
|
||||
ADD ${CHANNEL_URL} C:\TEMP\VisualStudio.chman
|
||||
# Download and install Build Tools for Visual Studio 2019 for the native desktop workload.
|
||||
ADD https://aka.ms/vs/16/release/vs_buildtools.exe C:\TEMP\vs_buildtools.exe
|
||||
RUN C:\TEMP\Install.cmd C:\TEMP\vs_buildtools.exe --quiet --wait --norestart --nocache `
|
||||
--channelUri C:\TEMP\VisualStudio.chman `
|
||||
--installChannelUri C:\TEMP\VisualStudio.chman `
|
||||
--add Microsoft.VisualStudio.Workload.VCTools `
|
||||
--add Microsoft.VisualStudio.Workload.UniversalBuildTools `
|
||||
--add Microsoft.VisualStudio.Component.VC.Tools.ARM64 `
|
||||
--add Microsoft.VisualStudio.Component.VC.Tools.ARM `
|
||||
--add Microsoft.VisualStudio.Component.UWP.VC.ARM64 `
|
||||
--includeRecommended `
|
||||
--installPath C:\BuildTools
|
||||
|
||||
RUN Get-ChildItem C:\BuildTools
|
||||
RUN Get-ChildItem C:\BuildTools\VC\Tools\MSVC
|
||||
RUN Get-ChildItem 'C:\Program Files (x86)\Windows Kits\10\lib'
|
||||
|
||||
RUN pip3 install meson
|
||||
|
||||
RUN 'git config --global user.email "cirunner@gstreamer.freedesktop.org"; git config --global user.name "GStreamer CI system"'
|
||||
|
||||
COPY install_mingw.ps1 C:\
|
||||
RUN C:\install_mingw.ps1
|
||||
|
||||
ARG DEFAULT_BRANCH="main"
|
||||
|
||||
COPY prepare_gst_env.ps1 C:\
|
||||
RUN C:\prepare_gst_env.ps1
|
||||
|
||||
COPY prepare_cerbero_env.sh C:\
|
||||
RUN C:\MinGW\msys\1.0\bin\bash.exe --login -c "C:/prepare_cerbero_env.sh"
|
35
ci/docker/windows/Install.cmd
Normal file
35
ci/docker/windows/Install.cmd
Normal file
@ -0,0 +1,35 @@
|
||||
@REM The MIT License (MIT)
|
||||
@REM Copyright (c) Microsoft Corporation
|
||||
|
||||
@REM Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
|
||||
@REM associated documentation files (the "Software"), to deal in the Software without restriction,
|
||||
@REM including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
@REM and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
@REM subject to the following conditions:
|
||||
|
||||
@REM The above copyright notice and this permission notice shall be included in all copies or substantial
|
||||
@REM portions of the Software.
|
||||
|
||||
@REM THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
|
||||
@REM NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
@REM IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
@REM WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
@REM SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
@if not defined _echo echo off
|
||||
setlocal enabledelayedexpansion
|
||||
|
||||
call %*
|
||||
if "%ERRORLEVEL%"=="3010" (
|
||||
exit /b 0
|
||||
) else (
|
||||
if not "%ERRORLEVEL%"=="0" (
|
||||
set ERR=%ERRORLEVEL%
|
||||
if "%CI_PROJECT_PATH%"=="" (
|
||||
call C:\TEMP\collect.exe -zip:C:\vslogs.zip
|
||||
) else (
|
||||
call C:\TEMP\collect.exe -zip:%CI_PROJECT_PATH%\vslogs.zip
|
||||
)
|
||||
exit /b !ERR!
|
||||
)
|
||||
)
|
27
ci/docker/windows/build_image.ps1
Normal file
27
ci/docker/windows/build_image.ps1
Normal file
@ -0,0 +1,27 @@
|
||||
$env:ErrorActionPreference='Stop'
|
||||
|
||||
$env:DEFAULT_BRANCH='main'
|
||||
$env:VERSION='test'
|
||||
$env:tag ="registry.freedesktop.org/gstreamer/gst-ci/amd64/windows:$env:VERSION-$env:DEFAULT_BRANCH"
|
||||
$env:rust_tag ="registry.freedesktop.org/gstreamer/gst-ci/amd64/windows-rust:$env:VERSION-$env:DEFAULT_BRANCH"
|
||||
|
||||
Set-Location './docker/windows/'
|
||||
|
||||
Get-Date
|
||||
Write-Output "Building $env:tag"
|
||||
docker build --isolation=hyperv -m 12g --build-arg DEFAULT_BRANCH=$env:DEFAULT_BRANCH -f Dockerfile -t $env:tag .
|
||||
if (!$?) {
|
||||
Write-Host "Failed to build docker image $env:tag"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Get-Date
|
||||
Write-Output "Building $env:rust_tag"
|
||||
docker build --isolation=hyperv -m 12g --build-arg DEFAULT_BRANCH=$env:DEFAULT_BRANCH -f rust.Dockerfile -t $env:rust_tag .
|
||||
if (!$?) {
|
||||
Write-Host "Failed to build docker image $env:rust_tag"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Get-Date
|
||||
Write-Output "Build Finished"
|
60
ci/docker/windows/container.ps1
Normal file
60
ci/docker/windows/container.ps1
Normal file
@ -0,0 +1,60 @@
|
||||
# Copied from mesa, big kudos
|
||||
#
|
||||
# https://gitlab.freedesktop.org/mesa/mesa/-/blob/master/.gitlab-ci/windows/mesa_container.ps1
|
||||
# https://gitlab.freedesktop.org/mesa/mesa/-/blob/34e3e164936d1d3cef267da7780e87f062fedf39/.gitlab-ci/windows/mesa_container.ps1
|
||||
|
||||
# Implements the equivalent of ci-templates container-ifnot-exists, using
|
||||
# Docker directly as we don't have buildah/podman/skopeo available under
|
||||
# Windows, nor can we execute Docker-in-Docker
|
||||
$registry_uri = $args[0]
|
||||
$registry_username = $args[1]
|
||||
$registry_password = $args[2]
|
||||
$registry_user_image = $args[3]
|
||||
$registry_central_image = $args[4]
|
||||
$dockerfile = $args[5]
|
||||
|
||||
docker --config "windows-docker.conf" login -u "$registry_username" -p "$registry_password" "$registry_uri"
|
||||
if (!$?) {
|
||||
Write-Host "docker login failed to $registry_uri"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
# if the image already exists, don't rebuild it
|
||||
docker --config "windows-docker.conf" pull "$registry_user_image"
|
||||
if ($?) {
|
||||
Write-Host "User image $registry_user_image already exists; not rebuilding"
|
||||
docker --config "windows-docker.conf" logout "$registry_uri"
|
||||
Exit 0
|
||||
}
|
||||
|
||||
# if the image already exists upstream, copy it
|
||||
docker --config "windows-docker.conf" pull "$registry_central_image"
|
||||
if ($?) {
|
||||
Write-Host "Copying central image $registry_central_image to user image $registry_user_image"
|
||||
docker --config "windows-docker.conf" tag "$registry_central_image" "$registry_user_image"
|
||||
docker --config "windows-docker.conf" push "$registry_user_image"
|
||||
$pushstatus = $?
|
||||
docker --config "windows-docker.conf" logout "$registry_uri"
|
||||
if (!$pushstatus) {
|
||||
Write-Host "Pushing image to $registry_user_image failed"
|
||||
Exit 1
|
||||
}
|
||||
Exit 0
|
||||
}
|
||||
|
||||
Write-Host "No image found at $registry_user_image or $registry_central_image; rebuilding"
|
||||
docker --config "windows-docker.conf" build $DOCKER_BUILD_ARGS --no-cache -t "$registry_user_image" -f "$dockerfile" "./ci/docker/windows"
|
||||
if (!$?) {
|
||||
Write-Host "Container build failed"
|
||||
docker --config "windows-docker.conf" logout "$registry_uri"
|
||||
Exit 1
|
||||
}
|
||||
Get-Date
|
||||
|
||||
docker --config "windows-docker.conf" push "$registry_user_image"
|
||||
$pushstatus = $?
|
||||
docker --config "windows-docker.conf" logout "$registry_uri"
|
||||
if (!$pushstatus) {
|
||||
Write-Host "Pushing image to $registry_user_image failed"
|
||||
Exit 1
|
||||
}
|
72
ci/docker/windows/install_gst.ps1
Normal file
72
ci/docker/windows/install_gst.ps1
Normal file
@ -0,0 +1,72 @@
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;
|
||||
|
||||
# Download gst-build and all its subprojects
|
||||
# git clone -b $env:DEFAULT_BRANCH https://gitlab.freedesktop.org/gstreamer/gst-build.git C:\gst-build
|
||||
# FIXME: need 1.19+ for cairo subproject :/
|
||||
# Should use a stable branch instead
|
||||
git clone -b master --depth 1 https://gitlab.freedesktop.org/gstreamer/gst-build.git C:\gst-build
|
||||
if (!$?) {
|
||||
Write-Host "Failed to clone gst-build"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Set-Location C:\gst-build
|
||||
|
||||
# Copy the cache we already have in the image to avoid massive redownloads
|
||||
Move-Item C:/subprojects/* C:\gst-build\subprojects
|
||||
|
||||
if (!$?) {
|
||||
Write-Host "Failed to copy subprojects cache"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
# Update the subprojects cache
|
||||
Write-Output "Running meson subproject reset"
|
||||
meson subprojects update --reset
|
||||
|
||||
if (!$?) {
|
||||
Write-Host "Failed to reset subprojects state"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Write-Output "Running git update"
|
||||
python git-update --no-interaction
|
||||
|
||||
if (!$?) {
|
||||
Write-Host "Failed to run git-update"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
$env:MESON_ARGS = "-Dglib:installed_tests=false " +
|
||||
"-Dlibnice:tests=disabled " +
|
||||
"-Dlibnice:examples=disabled " +
|
||||
"-Dffmpeg:tests=disabled " +
|
||||
"-Dopenh264:tests=disabled " +
|
||||
"-Dpygobject:tests=false " +
|
||||
"-Dugly=enabled " +
|
||||
"-Dbad=enabled " +
|
||||
"-Dges=enabled " +
|
||||
"-Drtsp_server=enabled " +
|
||||
"-Ddevtools=enabled " +
|
||||
"-Dsharp=disabled " +
|
||||
"-Dpython=disabled " +
|
||||
"-Dlibav=disabled " +
|
||||
"-Dvaapi=disabled " +
|
||||
"-Dgst-plugins-base:pango=enabled " +
|
||||
"-Dgst-plugins-good:cairo=enabled " +
|
||||
"-Dgpl=enabled "
|
||||
|
||||
Write-Output "Building gst"
|
||||
cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && meson _build $env:MESON_ARGS && meson compile -C _build && ninja -C _build install"
|
||||
|
||||
if (!$?) {
|
||||
Write-Host "Failed to build and install gst"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
git clean -fdxx
|
||||
|
||||
if (!$?) {
|
||||
Write-Host "Failed to git clean"
|
||||
Exit 1
|
||||
}
|
29
ci/docker/windows/install_mingw.ps1
Normal file
29
ci/docker/windows/install_mingw.ps1
Normal file
@ -0,0 +1,29 @@
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;
|
||||
|
||||
$msys_mingw_get_url = 'https://dotsrc.dl.osdn.net/osdn/mingw/68260/mingw-get-0.6.3-mingw32-pre-20170905-1-bin.tar.xz'
|
||||
|
||||
Get-Date
|
||||
Write-Host "Downloading and extracting mingw-get for MSYS"
|
||||
Invoke-WebRequest -Uri $msys_mingw_get_url -OutFile C:\mingw-get.tar.xz
|
||||
7z e C:\mingw-get.tar.xz -o"C:\\"
|
||||
$res1 = $?
|
||||
7z x C:\mingw-get.tar -o"C:\\MinGW"
|
||||
$res2 = $?
|
||||
|
||||
if (!($res1 -and $res2)) {
|
||||
Write-Host "Failed to extract mingw-get"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Remove-Item C:\mingw-get.tar.xz -Force
|
||||
Remove-Item C:\mingw-get.tar -Force
|
||||
|
||||
Get-Date
|
||||
Write-Host "Installing MSYS for Cerbero into C:/MinGW using mingw-get"
|
||||
Start-Process -Wait C:\MinGW\bin\mingw-get.exe -ArgumentList 'install msys-base mingw32-base mingw-developer-toolkit'
|
||||
if (!$?) {
|
||||
Write-Host "Failed to install Msys for cerbero using MinGW"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Write-Host "MSYS/MinGW Install Complete"
|
27
ci/docker/windows/prepare_cerbero_env.sh
Normal file
27
ci/docker/windows/prepare_cerbero_env.sh
Normal file
@ -0,0 +1,27 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -eux
|
||||
|
||||
cd C:/
|
||||
git clone -b ${DEFAULT_BRANCH} https://gitlab.freedesktop.org/gstreamer/cerbero.git
|
||||
cd cerbero
|
||||
|
||||
echo 'local_sources="C:/cerbero/cerbero-sources"' > localconf.cbc
|
||||
echo 'home_dir="C:/cerbero/cerbero-build"' >> localconf.cbc
|
||||
echo 'vs_install_path = "C:/BuildTools"' >> localconf.cbc
|
||||
echo 'vs_install_version = "vs15"' >> localconf.cbc
|
||||
|
||||
# Fetch all bootstrap requirements
|
||||
./cerbero-uninstalled -t -c localconf.cbc -c config/win64.cbc fetch-bootstrap --jobs=4
|
||||
# Fetch all package requirements for a mingw gstreamer build
|
||||
./cerbero-uninstalled -t -c localconf.cbc -c config/win64.cbc fetch-package --jobs=4 gstreamer-1.0
|
||||
# Fetch all package requirements for a visualstudio gstreamer build
|
||||
./cerbero-uninstalled -t -v visualstudio -c localconf.cbc -c config/win64.cbc fetch-package --jobs=4 gstreamer-1.0
|
||||
|
||||
# Fixup the MSYS installation
|
||||
./cerbero-uninstalled -t -c localconf.cbc -c config/win64.cbc bootstrap -y --build-tools=no --toolchains=no --offline
|
||||
|
||||
# Wipe visualstudio package dist, sources, logs, and the build tools recipes
|
||||
./cerbero-uninstalled -t -v visualstudio -c localconf.cbc -c config/win64.cbc wipe --force --build-tools
|
||||
# clean the localconf
|
||||
rm /c/cerbero/localconf.cbc
|
17
ci/docker/windows/prepare_gst_env.ps1
Normal file
17
ci/docker/windows/prepare_gst_env.ps1
Normal file
@ -0,0 +1,17 @@
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;
|
||||
|
||||
# FIXME: Python fails to validate github.com SSL certificate, unless we first
|
||||
# run a dummy download to force refreshing Windows' CA database.
|
||||
# See: https://bugs.python.org/issue36137
|
||||
(New-Object System.Net.WebClient).DownloadString("https://github.com") >$null
|
||||
|
||||
Write-Host "Cloning GStreamer"
|
||||
git clone -b $env:DEFAULT_BRANCH https://gitlab.freedesktop.org/gstreamer/gstreamer.git C:\gstreamer
|
||||
|
||||
# download the subprojects to try and cache them
|
||||
Write-Host "Downloading subprojects"
|
||||
meson subprojects download --sourcedir C:\gstreamer
|
||||
|
||||
Write-Host "Caching subprojects into /subprojects/"
|
||||
python C:\gstreamer/ci/scripts/handle-subprojects-cache.py --build C:\gstreamer/subprojects/
|
||||
Remove-Item -Recurse -Force C:\gstreamer
|
21
ci/docker/windows/rust.Dockerfile
Normal file
21
ci/docker/windows/rust.Dockerfile
Normal file
@ -0,0 +1,21 @@
|
||||
# escape=`
|
||||
|
||||
# Expect this to be set when calling docker build with
|
||||
# --build-arg BASE_IMAGE="" and make it fail if not set.
|
||||
ARG BASE_IMAGE="invalid.gstreamer.freedesktop.org/invalid"
|
||||
FROM $BASE_IMAGE
|
||||
|
||||
ARG DEFAULT_BRANCH="main"
|
||||
ARG RUST_VERSION="invalid"
|
||||
|
||||
COPY install_gst.ps1 C:\
|
||||
RUN C:\install_gst.ps1
|
||||
RUN choco install -y pkgconfiglite
|
||||
ENV PKG_CONFIG_PATH="C:/lib/pkgconfig"
|
||||
|
||||
ADD https://win.rustup.rs/x86_64 C:\rustup-init.exe
|
||||
RUN C:\rustup-init.exe -y --profile minimal --default-toolchain $env:RUST_VERSION
|
||||
|
||||
# Uncomment for easy testing
|
||||
# RUN git clone --depth 1 https://gitlab.freedesktop.org/gstreamer/gstreamer-rs.git
|
||||
# RUN cd gstreamer-rs; cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64; cargo build --all; cargo test --all"
|
80
ci/fuzzing/README.txt
Normal file
80
ci/fuzzing/README.txt
Normal file
@ -0,0 +1,80 @@
|
||||
Fuzzing GStreamer
|
||||
=================
|
||||
|
||||
This directory contains the various fuzzing targets and helper
|
||||
scripts.
|
||||
|
||||
* Fuzzing targets
|
||||
|
||||
Fuzzing targets are small applications where we can test a specific
|
||||
element or API. The goal is to have them be as small/targeted as
|
||||
possible.
|
||||
|
||||
ex: appsrc ! <some_element> ! fakesink num-buffers=<small>
|
||||
|
||||
Not all components can be tested directly and therefore will be
|
||||
indirectly tested via other targets (ex: libgstaudio will be tested
|
||||
by targets/elements requiring it).
|
||||
|
||||
Anything that can process externally-provided data should be
|
||||
covered, but there are cases where it might not make sense to use a
|
||||
fuzzer (such as most elements processing raw audio/video).
|
||||
|
||||
* build-oss-fuzz.sh
|
||||
|
||||
This is the script executed by the oss-fuzz project.
|
||||
|
||||
It builds glib, GStreamer, plugins and the fuzzing targets.
|
||||
|
||||
* *.c
|
||||
|
||||
The fuzzing targets, where the data to test is provided to a
|
||||
function whose signature follows the LibFuzzer signature:
|
||||
https://llvm.org/docs/LibFuzzer.html
|
||||
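
  As a minimal sketch (the real targets in this directory additionally
  register the static plugins they need before feeding the data):

    int LLVMFuzzerTestOneInput (const guint8 * data, size_t size)
    {
      /* feed data/size into the element or API under test */
      return 0;
    }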
|
||||
* TODO
|
||||
|
||||
* Add a standalone build script
|
||||
|
||||
We need to be able to build and test the fuzzing targets outside
|
||||
of the oss-fuzz infrastructure, and do that in our continuous
|
||||
integration system.
|
||||
|
||||
We need:
|
||||
|
||||
* A dummy fuzzing engine (given a directory, it opens all files and
|
||||
calls the fuzzing targets with the content of those files).
|
||||
* A script to be able to build those targets with that dummy engine
|
||||
* A corpus of files to test those targets with.
|
||||
|
||||
* Build targets with dummy engine and run with existing tests.
|
||||
|
||||
* Create pull-based variants
|
||||
|
||||
Currently the existing targets are push-based only. Where
|
||||
applicable we should make pull-based variants to test the other
|
||||
code paths.
|
||||
|
||||
* Add more targets
|
||||
|
||||
core:
|
||||
gst_parse fuzzer ?
|
||||
base:
|
||||
ext/
|
||||
ogg
|
||||
opus
|
||||
pango
|
||||
theora
|
||||
vorbis
|
||||
gst/
|
||||
subparse
|
||||
typefind : already covered in typefind target
|
||||
gst-libs/gst/
|
||||
sdp
|
||||
other ones easily testable directly ?
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
207
ci/fuzzing/build-oss-fuzz.sh
Executable file
207
ci/fuzzing/build-oss-fuzz.sh
Executable file
@ -0,0 +1,207 @@
|
||||
#!/bin/bash -eu
|
||||
|
||||
# build-oss-fuzz.sh
|
||||
#
|
||||
# Build script which is executed by oss-fuzz build.sh
|
||||
#
|
||||
# $SRC: location of code checkouts
|
||||
# $OUT: location to put fuzzing targets and corpus
|
||||
# $WORK: writable directory where all compilation should be executed
|
||||
#
|
||||
# /!\ Do not override any CC, CXX, CFLAGS, ... variables
|
||||
#
|
||||
|
||||
# This script is divided in two parts
|
||||
#
|
||||
# 1) Build all the dependencies statically
|
||||
#
|
||||
# 2) Build the fuzzing targets
|
||||
|
||||
# Prefix where we will temporarily install everything
|
||||
PREFIX=$WORK/prefix
|
||||
mkdir -p $PREFIX
|
||||
# always try getting the arguments for static compilation/linking
|
||||
# Fixes GModule not being picked up when gstreamer-1.0.pc is looked up by meson
|
||||
# more or less https://github.com/mesonbuild/meson/pull/6629
|
||||
export PKG_CONFIG="`which pkg-config` --static"
|
||||
export PKG_CONFIG_PATH=$PREFIX/lib/pkgconfig
|
||||
export PATH=$PREFIX/bin:$PATH
|
||||
|
||||
# Minimize gst-debug level/code
|
||||
export CFLAGS="$CFLAGS -DGST_LEVEL_MAX=2"
|
||||
|
||||
#
|
||||
echo "CFLAGS : " $CFLAGS
|
||||
echo "CXXFLAGS : " $CXXFLAGS
|
||||
PLUGIN_DIR=$PREFIX/lib/gstreamer-1.0
|
||||
|
||||
rm -rf $WORK/*
|
||||
|
||||
# Switch to work directory
|
||||
cd $WORK
|
||||
|
||||
# 1) BUILD GLIB AND GSTREAMER
|
||||
# Note: we build glib ourselves so that we get proper malloc/free backtraces
|
||||
tar xvJf $SRC/glib-2.64.2.tar.xz
|
||||
cd glib-2.64.2
|
||||
# options taken from glib's oss-fuzz build definition
|
||||
meson \
|
||||
--prefix=$PREFIX \
|
||||
--libdir=lib \
|
||||
--default-library=static \
|
||||
-Db_lundef=false \
|
||||
-Doss_fuzz=enabled \
|
||||
-Dlibmount=disabled \
|
||||
-Dinternal_pcre=true \
|
||||
_builddir
|
||||
ninja -C _builddir
|
||||
ninja -C _builddir install
|
||||
cd ..
|
||||
|
||||
# Note: We don't use/build orc since it still seems to be problematic
|
||||
# with clang and the various sanitizers.
|
||||
|
||||
# For now we only build core and base. Add other modules when/if needed
|
||||
for i in gstreamer;
|
||||
do
|
||||
mkdir -p $i
|
||||
cd $i
|
||||
meson \
|
||||
--prefix=$PREFIX \
|
||||
--libdir=lib \
|
||||
--default-library=static \
|
||||
-Db_lundef=false \
|
||||
-Ddoc=disabled \
|
||||
-Dexamples=disabled \
|
||||
-Dintrospection=disabled \
|
||||
-Dgood=disabled \
|
||||
-Dugly=disabled \
|
||||
-Dbad=disabled \
|
||||
-Dlibav=disabled \
|
||||
-Dges=disabled \
|
||||
-Domx=disabled \
|
||||
-Dvaapi=disabled \
|
||||
-Dsharp=disabled \
|
||||
-Drs=disabled \
|
||||
-Dpython=disabled \
|
||||
-Dlibnice=disabled \
|
||||
-Ddevtools=disabled \
|
||||
-Drtsp_server=disabled \
|
||||
-Dgst-examples=disabled \
|
||||
-Dqt5=disabled \
|
||||
-Dorc=disabled \
|
||||
-Dgtk_doc=disabled \
|
||||
-Dgstreamer:tracer_hooks=false \
|
||||
-Dgstreamer:registry=false \
|
||||
-Dgst-plugins-base:cairo=disabled \
|
||||
-Dgst-plugins-base:opus=disabled \
|
||||
-Dgst-plugins-base:pango=disabled \
|
||||
_builddir \
|
||||
$SRC/$i
|
||||
ninja -C _builddir
|
||||
ninja -C _builddir install
|
||||
cd ..
|
||||
done
|
||||
|
||||
|
||||
|
||||
# 2) Build the target fuzzers
|
||||
|
||||
# All targets will be linked in with $LIB_FUZZING_ENGINE which contains the
|
||||
# actual fuzzing runner. Any fuzzing engine can be used provided it calls
|
||||
# the same function as libfuzzer.
|
||||
|
||||
# Note: The fuzzer .o needs to be first compiled with CC and then linked with CXX
|
||||
|
||||
# We want to statically link everything, except for shared libraries
|
||||
# that are present on the base image. Those need to be specified
|
||||
# beforehand and explicitly linked dynamically. If any of the static
|
||||
# dependencies require a pre-installed shared library, you need to add
|
||||
# that library to the following list
|
||||
PREDEPS_LDFLAGS="-Wl,-Bdynamic -ldl -lm -pthread -lrt -lpthread"
|
||||
|
||||
# These are the basic .pc dependencies required to build any of the fuzzing targets
|
||||
# That is : glib, gstreamer core and gst-app
|
||||
# The extra target-specific dependencies are to be specified later
|
||||
COMMON_DEPS="glib-2.0 gio-2.0 gstreamer-1.0 gstreamer-app-1.0"
|
||||
|
||||
# For each target, define the following:
|
||||
# TARGET_DEPS : Extra .pc dependencies for the target (in addition to $COMMON_DEPS)
|
||||
# All dependencies (including sub-dependencies) must be specified
|
||||
# PLUGINS : .a of the plugins to link
|
||||
# They must match the static plugins declared/registered in the target
|
||||
|
||||
#
|
||||
# TARGET : push-based ogg/theora/vorbis discoverer
|
||||
#
|
||||
# FIXME : Rename to discoverer_push_oggtheoravorbis
|
||||
|
||||
TARGET_DEPS=" gstreamer-pbutils-1.0 \
|
||||
gstreamer-video-1.0 \
|
||||
gstreamer-audio-1.0 \
|
||||
gstreamer-riff-1.0 \
|
||||
gstreamer-tag-1.0 \
|
||||
zlib ogg vorbis vorbisenc \
|
||||
theoraenc theoradec theora cairo"
|
||||
|
||||
PLUGINS="$PLUGIN_DIR/libgstcoreelements.a \
|
||||
$PLUGIN_DIR/libgsttypefindfunctions.a \
|
||||
$PLUGIN_DIR/libgstplayback.a \
|
||||
$PLUGIN_DIR/libgstapp.a \
|
||||
$PLUGIN_DIR/libgstvorbis.a \
|
||||
$PLUGIN_DIR/libgsttheora.a \
|
||||
$PLUGIN_DIR/libgstogg.a"
|
||||
|
||||
echo
|
||||
echo ">>>> BUILDING gst-discoverer"
|
||||
echo
|
||||
BUILD_CFLAGS="$CFLAGS `pkg-config --static --cflags $COMMON_DEPS $TARGET_DEPS`"
|
||||
BUILD_LDFLAGS="-Wl,-static `pkg-config --static --libs $COMMON_DEPS $TARGET_DEPS`"
|
||||
|
||||
$CC $CFLAGS $BUILD_CFLAGS -c $SRC/gstreamer/ci/fuzzing/gst-discoverer.c -o $SRC/gstreamer/ci/fuzzing/gst-discoverer.o
|
||||
$CXX $CXXFLAGS \
|
||||
-o $OUT/gst-discoverer \
|
||||
$PREDEPS_LDFLAGS \
|
||||
$SRC/gstreamer/ci/fuzzing/gst-discoverer.o \
|
||||
$PLUGINS \
|
||||
$BUILD_LDFLAGS \
|
||||
$LIB_FUZZING_ENGINE \
|
||||
-Wl,-Bdynamic
|
||||
|
||||
#
|
||||
# TARGET : push-based typefind
|
||||
#
|
||||
|
||||
# typefindfunction depends on pbutils which depends on gst{audio|video|tag}
|
||||
TARGET_DEPS=" gstreamer-pbutils-1.0 \
|
||||
gstreamer-video-1.0 \
|
||||
gstreamer-audio-1.0 \
|
||||
gstreamer-tag-1.0"
|
||||
|
||||
PLUGINS="$PLUGIN_DIR/libgstcoreelements.a \
|
||||
$PLUGIN_DIR/libgsttypefindfunctions.a \
|
||||
$PLUGIN_DIR/libgstapp.a"
|
||||
|
||||
echo
|
||||
echo ">>>> BUILDING typefind"
|
||||
echo
|
||||
BUILD_CFLAGS="$CFLAGS `pkg-config --static --cflags $COMMON_DEPS $TARGET_DEPS`"
|
||||
BUILD_LDFLAGS="-Wl,-static `pkg-config --static --libs $COMMON_DEPS $TARGET_DEPS`"
|
||||
|
||||
$CC $CFLAGS $BUILD_CFLAGS -c $SRC/gstreamer/ci/fuzzing/typefind.c -o $SRC/gstreamer/ci/fuzzing/typefind.o
|
||||
$CXX $CXXFLAGS \
|
||||
-o $OUT/typefind \
|
||||
$PREDEPS_LDFLAGS \
|
||||
$SRC/gstreamer/ci/fuzzing/typefind.o \
|
||||
$PLUGINS \
|
||||
$BUILD_LDFLAGS \
|
||||
$LIB_FUZZING_ENGINE \
|
||||
-Wl,-Bdynamic
|
||||
|
||||
echo
|
||||
echo ">>>> Installing seed corpus"
|
||||
echo
|
||||
# FIXME : Sadly we apparently need to have the corpus downloaded in the
|
||||
# Dockerfile and not here.
|
||||
|
||||
cp $SRC/*_seed_corpus.zip $OUT
|
137
ci/fuzzing/gst-discoverer.c
Normal file
@@ -0,0 +1,137 @@
/*
 * Copyright 2016 Google Inc.
 * author: Edward Hervey <bilboed@bilboed.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <locale.h>

#include <stdlib.h>
#include <glib.h>
#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>

#ifndef LOCAL_FUZZ_BUILD
GST_PLUGIN_STATIC_DECLARE (coreelements);
GST_PLUGIN_STATIC_DECLARE (playback);
GST_PLUGIN_STATIC_DECLARE (typefindfunctions);
GST_PLUGIN_STATIC_DECLARE (app);
GST_PLUGIN_STATIC_DECLARE (ogg);
GST_PLUGIN_STATIC_DECLARE (theora);
GST_PLUGIN_STATIC_DECLARE (vorbis);
#endif

/* push-based discoverer fuzzing target
 *
 * This application can be compiled with libFuzzer to simulate
 * a push-based discoverer execution.
 *
 * To reproduce the failing behaviour, use:
 * $ gst-discoverer-1.0 pushfile:///...
 *
 * The goal is to cover basic usage of demuxers, parsers and
 * base decoder elements.
 *
 * When compiling, only link the required demuxer/parser/decoder
 * plugins and keep it to a limited range (ex: ogg/theora/vorbis)
 *
 **/

const guint8 *fuzztesting_data;
size_t fuzztesting_size;

static void
appsrc_configuration (GstDiscoverer * dc, GstElement * source, gpointer data)
{
  GstBuffer *buf;
  GstFlowReturn ret;

  /* Create buffer from fuzztesting_data which shouldn't be freed */
  buf =
      gst_buffer_new_wrapped_full (0, (gpointer) fuzztesting_data,
      fuzztesting_size, 0, fuzztesting_size, NULL, NULL);
  g_object_set (G_OBJECT (source), "size", fuzztesting_size, NULL);
  g_signal_emit_by_name (G_OBJECT (source), "push-buffer", buf, &ret);
  gst_buffer_unref (buf);
}

static void
custom_logger (const gchar * log_domain,
    GLogLevelFlags log_level, const gchar * message, gpointer unused_data)
{
  if (log_level & G_LOG_LEVEL_CRITICAL) {
    g_printerr ("CRITICAL ERROR : %s\n", message);
    abort ();
  } else if (log_level & G_LOG_LEVEL_WARNING) {
    g_printerr ("WARNING : %s\n", message);
  }
}

int
LLVMFuzzerTestOneInput (const guint8 * data, size_t size)
{
  GError *err = NULL;
  GstDiscoverer *dc;
  gint timeout = 10;
  GstDiscovererInfo *info;
  static gboolean initialized = FALSE;

  if (!initialized) {
    /* We want critical warnings to assert so we can fix them */
    g_log_set_always_fatal (G_LOG_LEVEL_CRITICAL);
    g_log_set_default_handler (custom_logger, NULL);

    /* Only initialize and register plugins once */
    gst_init (NULL, NULL);

#ifndef LOCAL_FUZZ_BUILD
    GST_PLUGIN_STATIC_REGISTER (coreelements);
    GST_PLUGIN_STATIC_REGISTER (playback);
    GST_PLUGIN_STATIC_REGISTER (typefindfunctions);
    GST_PLUGIN_STATIC_REGISTER (app);
    GST_PLUGIN_STATIC_REGISTER (ogg);
    GST_PLUGIN_STATIC_REGISTER (theora);
    GST_PLUGIN_STATIC_REGISTER (vorbis);
#endif

    initialized = TRUE;
  }

  dc = gst_discoverer_new (timeout * GST_SECOND, &err);
  if (G_UNLIKELY (dc == NULL)) {
    g_print ("Error initializing: %s\n", err->message);
    g_clear_error (&err);
    exit (1);
  }

  fuzztesting_data = data;
  fuzztesting_size = size;

  /* Connect to source-setup signal to give the data */
  g_signal_connect (dc, "source-setup", (GCallback) appsrc_configuration, NULL);

  info = gst_discoverer_discover_uri (dc, "appsrc://", &err);
  g_clear_error (&err);
  if (info)
    gst_discoverer_info_unref (info);

  g_object_unref (dc);

  return 0;
}
78
ci/fuzzing/localfuzzer.c
Normal file
@@ -0,0 +1,78 @@
/* GStreamer
 * Copyright (C) 2017 Edward Hervey <bilboed@bilboed.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/* Local fuzzer runner */
#include <glib.h>

extern int LLVMFuzzerTestOneInput (const guint8 * data, size_t size);

static void
test_file (gchar * filename)
{
  GDir *dir;
  gchar *path;
  gchar *contents;
  gsize length;

  /* if filename is a directory, process the contents */
  if ((dir = g_dir_open (filename, 0, NULL))) {
    const gchar *entry;

    while ((entry = g_dir_read_name (dir))) {
      gchar *spath;

      spath = g_strconcat (filename, G_DIR_SEPARATOR_S, entry, NULL);
      test_file (spath);
      g_free (spath);
    }

    g_dir_close (dir);
    return;
  }

  /* Make sure path is absolute */
  if (!g_path_is_absolute (filename)) {
    gchar *curdir;

    curdir = g_get_current_dir ();
    path = g_build_filename (curdir, filename, NULL);
    g_free (curdir);
  } else
    path = g_strdup (filename);

  /* Check if path exists */
  if (g_file_get_contents (path, &contents, &length, NULL)) {
    g_print (">>> %s (%" G_GSIZE_FORMAT " bytes)\n", path, length);
    LLVMFuzzerTestOneInput ((const guint8 *) contents, length);
    g_free (contents);
  }

  g_free (path);
}

int
main (int argc, gchar ** argv)
{
  gint i;

  for (i = 1; i < argc; i++)
    test_file (argv[i]);

  return 0;
}
115
ci/fuzzing/typefind.c
Normal file
@@ -0,0 +1,115 @@
/*
 * Copyright 2016 Google Inc.
 * author: Edward Hervey <bilboed@bilboed.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <locale.h>

#include <stdlib.h>
#include <glib.h>
#include <gst/gst.h>

#ifndef LOCAL_FUZZ_BUILD
GST_PLUGIN_STATIC_DECLARE (coreelements);
GST_PLUGIN_STATIC_DECLARE (typefindfunctions);
GST_PLUGIN_STATIC_DECLARE (app);
#endif

/* push-based typefind fuzzing target
 *
 * This application can be compiled with libFuzzer to simulate
 * a push-based typefind execution.
 *
 * To reproduce the failing behaviour, use:
 * $ gst-launch-1.0 pushfile:///.. ! typefind ! fakesink
 *
 * The goal is to cover typefind code and implementation.
 *
 **/
static void
custom_logger (const gchar * log_domain,
    GLogLevelFlags log_level, const gchar * message, gpointer unused_data)
{
  if (log_level & G_LOG_LEVEL_CRITICAL) {
    g_printerr ("CRITICAL ERROR : %s\n", message);
    abort ();
  } else if (log_level & G_LOG_LEVEL_WARNING) {
    g_printerr ("WARNING : %s\n", message);
  }
}

int
LLVMFuzzerTestOneInput (const guint8 * data, size_t size)
{
  GError *err = NULL;
  static gboolean initialized = FALSE;
  GstElement *pipeline, *source, *typefind, *fakesink;
  GstBuffer *buf;
  GstFlowReturn flowret;
  GstState state;

  if (!initialized) {
    /* We want critical warnings to assert so we can fix them */
    g_log_set_always_fatal (G_LOG_LEVEL_CRITICAL);
    g_log_set_default_handler (custom_logger, NULL);

    /* Only initialize and register plugins once */
    gst_init (NULL, NULL);

#ifndef LOCAL_FUZZ_BUILD
    GST_PLUGIN_STATIC_REGISTER (coreelements);
    GST_PLUGIN_STATIC_REGISTER (typefindfunctions);
    GST_PLUGIN_STATIC_REGISTER (app);
#endif

    initialized = TRUE;
  }

  /* Create the pipeline */
  pipeline = gst_pipeline_new ("pipeline");
  source = gst_element_factory_make ("appsrc", "source");
  typefind = gst_element_factory_make ("typefind", "typefind");
  fakesink = gst_element_factory_make ("fakesink", "fakesink");

  gst_bin_add_many (GST_BIN (pipeline), source, typefind, fakesink, NULL);
  gst_element_link_many (source, typefind, fakesink, NULL);

  /* Set pipeline to READY so we can provide data to appsrc */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_READY);
  buf = gst_buffer_new_wrapped_full (0, (gpointer) data, size,
      0, size, NULL, NULL);
  g_object_set (G_OBJECT (source), "size", size, NULL);
  g_signal_emit_by_name (G_OBJECT (source), "push-buffer", buf, &flowret);
  gst_buffer_unref (buf);

  /* Set pipeline to PAUSED and wait (typefind will either fail or succeed) */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);

  /* wait until state change either completes or fails */
  gst_element_get_state (GST_ELEMENT (pipeline), &state, NULL, -1);

  /* Go back to NULL */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);

  /* And release the pipeline */
  gst_object_unref (pipeline);

  return 0;
}
270
ci/gitlab/build_manifest.py
Executable file
@@ -0,0 +1,270 @@
#!/usr/bin/env python3

import argparse
import os
import sys
import subprocess
import urllib.error
import urllib.parse
import urllib.request
import json

from typing import Dict, Tuple, List
# from pprint import pprint

if sys.version_info < (3, 6):
    raise SystemExit('Need Python 3.6 or newer')

GSTREAMER_MODULES: List[str] = [
    'orc',
    'cerbero',
    'gst-build',
    'gstreamer',
    'gst-plugins-base',
    'gst-plugins-good',
    'gst-plugins-bad',
    'gst-plugins-ugly',
    'gst-libav',
    'gst-devtools',
    'gst-docs',
    'gst-editing-services',
    'gst-omx',
    'gst-python',
    'gst-rtsp-server',
    'gstreamer-sharp',
    'gstreamer-vaapi',
    'gst-integration-testsuites',
    'gst-examples',
]

MANIFEST_TEMPLATE: str = """<?xml version="1.0" encoding="UTF-8"?>
<manifest>
  <remote fetch="{}" name="user"/>
  <remote fetch="https://gitlab.freedesktop.org/gstreamer/" name="origin"/>
{}
</manifest>"""


CERBERO_DEPS_LOGS_TARGETS = (
    ('cross-ios', 'universal'),
    ('cross-windows-mingw', 'x86'),
    ('cross-windows-mingw', 'x86_64'),
    ('cross-android', 'universal'),
    ('fedora', 'x86_64'),
    ('macos', 'x86_64'),
    ('windows-msvc', 'x86_64'),
)

# Disallow git prompting for a username/password
os.environ['GIT_TERMINAL_PROMPT'] = '0'
def git(*args, repository_path='.'):
    return subprocess.check_output(["git"] + list(args), cwd=repository_path).decode()

def get_cerbero_last_build_info (branch : str):
    # Fetch the deps log for all (distro, arch) targets
    all_commits = {}
    for distro, arch in CERBERO_DEPS_LOGS_TARGETS:
        url = f'https://artifacts.gstreamer-foundation.net/cerbero-deps/{branch}/{distro}/{arch}/cerbero-deps.log'
        print(f'Fetching {url}')
        try:
            req = urllib.request.Request(url)
            resp = urllib.request.urlopen(req)
            deps = json.loads(resp.read())
        except urllib.error.URLError as e:
            print(f'WARNING: Failed to GET {url}: {e!s}')
            continue

        for dep in deps:
            commit = dep['commit']
            if commit not in all_commits:
                all_commits[commit] = []
            all_commits[commit].append((distro, arch))

    # Find the cerbero commit that has the largest number of caches
    best_commit = None
    newest_commit = None
    max_caches = 0
    total_caches = len(CERBERO_DEPS_LOGS_TARGETS)
    for commit, targets in all_commits.items():
        if newest_commit is None:
            newest_commit = commit
        have_caches = len(targets)
        # If this commit has caches for all targets, just use it
        if have_caches == total_caches:
            best_commit = commit
            break
        # Else, try to find the commit with the most caches
        if have_caches > max_caches:
            max_caches = have_caches
            best_commit = commit
    if newest_commit is None:
        print('WARNING: No deps logs were found, will build from scratch')
    if best_commit != newest_commit:
        print(f'WARNING: Cache is not up-to-date for commit {newest_commit}, using commit {best_commit} instead')
    return best_commit


def get_branch_info(module: str, namespace: str, branch: str) -> Tuple[str, str]:
    try:
        res = git('ls-remote', f'https://gitlab.freedesktop.org/{namespace}/{module}.git', branch)
    except subprocess.CalledProcessError:
        return None, None

    if not res:
        return None, None

    # Special case cerbero to avoid cache misses
    if module == 'cerbero':
        sha = get_cerbero_last_build_info(branch)
        if sha is not None:
            return sha, sha

    lines = res.split('\n')
    for line in lines:
        if line.endswith('/' + branch):
            try:
                sha, refname = line.split('\t')
            except ValueError:
                continue
            return refname.strip(), sha

    return None, None


def find_repository_sha(module: str, branchname: str) -> Tuple[str, str, str]:
    namespace: str = os.environ["CI_PROJECT_NAMESPACE"]
    ups_branch: str = os.getenv('GST_UPSTREAM_BRANCH', default='master')

    if module == "orc":
        ups_branch = os.getenv('ORC_UPSTREAM_BRANCH', default='master')

    if module == os.environ['CI_PROJECT_NAME']:
        return 'user', branchname, os.environ['CI_COMMIT_SHA']

    if branchname != ups_branch:
        remote_refname, sha = get_branch_info(module, namespace, branchname)
        if sha is not None:
            return 'user', remote_refname, sha

    # Check upstream project for a branch
    remote_refname, sha = get_branch_info(module, 'gstreamer', ups_branch)
    if sha is not None:
        return 'origin', remote_refname, sha

    # This should never occur given the upstream fallback above
    print(f"Could not find anything for {module}:{branchname}")
    print("If something reaches that point, please file a bug")
    print("https://gitlab.freedesktop.org/gstreamer/gst-ci/issues")
    assert False


# --- Unit tests --- #
# Basically, pytest will happily let a test mutate a variable, and then run
# the next tests on the same environment without resetting the vars.
def preserve_ci_vars(func):
    """Preserve the original CI Variable values"""
    def wrapper():
        try:
            url = os.environ["CI_PROJECT_URL"]
            user = os.environ["CI_PROJECT_NAMESPACE"]
        except KeyError:
            url = "invalid"
            user = ""

        private = os.getenv("READ_PROJECTS_TOKEN", default=None)
        if not private:
            os.environ["READ_PROJECTS_TOKEN"] = "FOO"

        func()

        os.environ["CI_PROJECT_URL"] = url
        os.environ["CI_PROJECT_NAMESPACE"] = user

        if private:
            os.environ["READ_PROJECTS_TOKEN"] = private
        # if it was set after
        elif os.getenv("READ_PROJECTS_TOKEN", default=None):
            del os.environ["READ_PROJECTS_TOKEN"]

    return wrapper

@preserve_ci_vars
def test_find_repository_sha():
    os.environ["CI_PROJECT_NAME"] = "some-random-project"
    os.environ["CI_PROJECT_URL"] = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-good"
    os.environ["CI_PROJECT_NAMESPACE"] = "alatiera"
    os.environ["GST_UPSTREAM_BRANCH"] = "master"
    del os.environ["READ_PROJECTS_TOKEN"]

    # This should find the repository in the user namespace
    remote, refname, git_ref = find_repository_sha("gst-plugins-good", "1.2")
    assert remote == "user"
    assert git_ref == "08ab260b8a39791e7e62c95f4b64fd5b69959325"
    assert refname == "refs/heads/1.2"

    # This should fall back to the upstream master branch since no matching branch was found
    remote, refname, git_ref = find_repository_sha("gst-plugins-good", "totally-valid-branch-name")
    assert remote == "origin"
    assert refname == "refs/heads/master"

    os.environ["CI_PROJECT_NAME"] = "the_project"
    os.environ["CI_COMMIT_SHA"] = "MySha"

    remote, refname, git_ref = find_repository_sha("the_project", "whatever")
    assert remote == "user"
    assert git_ref == "MySha"
    assert refname == "whatever"


@preserve_ci_vars
def test_get_project_branch():
    os.environ["CI_PROJECT_NAME"] = "some-random-project"
    os.environ["CI_COMMIT_SHA"] = "dwbuiw"
    os.environ["CI_PROJECT_URL"] = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-good"
    os.environ["CI_PROJECT_NAMESPACE"] = "nowaythisnamespaceexists_"
    del os.environ["READ_PROJECTS_TOKEN"]

    os.environ['GST_UPSTREAM_BRANCH'] = '1.12'
    remote, refname, twelve = find_repository_sha('gst-plugins-good', '1.12')
    assert twelve is not None
    assert remote == 'origin'
    assert refname == "refs/heads/1.12"

    os.environ['GST_UPSTREAM_BRANCH'] = '1.14'
    remote, refname, fourteen = find_repository_sha('gst-plugins-good', '1.14')
    assert fourteen is not None
    assert remote == 'origin'
    assert refname == "refs/heads/1.14"


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--self-update", action="store_true", default=False)
    parser.add_argument(dest="output", default='manifest.xml', nargs='?')
    options = parser.parse_args()

    current_branch: str = os.environ['CI_COMMIT_REF_NAME']
    user_remote_url: str = os.path.dirname(os.environ['CI_PROJECT_URL'])
    if not user_remote_url.endswith('/'):
        user_remote_url += '/'

    if options.self_update:
        remote, remote_refname, sha = find_repository_sha("gst-ci", current_branch)
        if remote == 'user':
            remote = user_remote_url + 'gst-ci'
        else:
            remote = "https://gitlab.freedesktop.org/gstreamer/gst-ci"

        git('fetch', remote, remote_refname)
        git('checkout', '--detach', sha)
        sys.exit(0)

    projects: str = ''
    for module in GSTREAMER_MODULES:
        print(f"Checking {module}:", end=' ')
        remote, refname, revision = find_repository_sha(module, current_branch)
        print(f"remote '{remote}', refname: '{refname}', revision: '{revision}'")
        projects += f"  <project path=\"{module}\" name=\"{module}.git\" remote=\"{remote}\" revision=\"{revision}\" refname=\"{refname}\" />\n"

    with open(options.output, mode='w') as manifest:
        print(MANIFEST_TEMPLATE.format(user_remote_url, projects), file=manifest)
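For reference, a minimal usage sketch (not part of this commit; the output filename and the way the result is consumed are illustrative, and the script expects the usual GitLab CI variables such as CI_COMMIT_REF_NAME and CI_PROJECT_NAMESPACE to be set):

# Hypothetical sketch: generate the manifest, then list the pinned revisions it contains.
import subprocess
import xml.etree.ElementTree as ET

subprocess.check_call(['python3', 'ci/gitlab/build_manifest.py', 'manifest.xml'])
for project in ET.parse('manifest.xml').getroot().iter('project'):
    print(project.attrib['name'], project.attrib['remote'], project.attrib['revision'])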
92
ci/gitlab/clone_manifest_ref.py
Executable file
@@ -0,0 +1,92 @@
#!/usr/bin/env python3

import argparse
import os
import subprocess

from collections import namedtuple
import xml.etree.ElementTree as ET

# Disallow git prompting for a username/password
os.environ['GIT_TERMINAL_PROMPT'] = '0'
def git(*args, repository_path='.'):
    return subprocess.check_output(["git"] + list(args), cwd=repository_path).decode()

class Manifest(object):
    '''
    Parse and store the content of a manifest file
    '''

    remotes = {}
    projects = {}
    default_remote = 'origin'
    default_revision = 'refs/heads/master'

    def __init__(self, manifest_path):
        self.manifest_path = manifest_path

    def parse(self):
        try:
            tree = ET.parse(self.manifest_path)
        except Exception as ex:
            raise Exception("Error loading manifest %s in file %s" % (self.manifest_path, ex))

        root = tree.getroot()

        for child in root:
            if child.tag == 'remote':
                self.remotes[child.attrib['name']] = child.attrib['fetch']
            if child.tag == 'default':
                self.default_remote = child.attrib['remote'] or self.default_remote
                self.default_revision = child.attrib['revision'] or self.default_revision
            if child.tag == 'project':
                project = namedtuple('Project', ['name', 'remote',
                                                 'revision', 'fetch_uri'])

                project.name = child.attrib['name']
                if project.name.endswith('.git'):
                    project.name = project.name[:-4]
                project.remote = child.attrib.get('remote') or self.default_remote
                project.revision = child.attrib.get('revision') or self.default_revision
                project.fetch_uri = self.remotes[project.remote] + project.name + '.git'

                self.projects[project.name] = project

    def find_project(self, name):
        try:
            return self.projects[name]
        except KeyError as ex:
            raise Exception("Could not find project %s in manifest %s" % (name, self.manifest_path))

    def get_fetch_uri(self, project, remote):
        fetch = self.remotes[remote]
        return fetch + project.name + '.git'

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--project", action="store", type=str)
    parser.add_argument("--destination", action="store", type=str, default='.')
    parser.add_argument("--manifest", action="store", type=str)
    parser.add_argument("--fetch", action="store_true", default=False)
    options = parser.parse_args()

    if not options.project:
        raise ValueError("--project argument not provided")
    if not options.manifest:
        raise ValueError("--manifest argument not provided")

    manifest = Manifest(options.manifest)
    manifest.parse()
    project = manifest.find_project(options.project)

    dest = options.destination
    if dest == '.':
        dest = os.path.join(os.getcwd(), project.name)

    if options.fetch:
        assert os.path.exists(dest) == True
        git('fetch', project.fetch_uri, project.revision, repository_path=dest)
    else:
        git('clone', project.fetch_uri, dest)

    git('checkout', '--detach', project.revision, repository_path=dest)
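A minimal sketch (the manifest filename and destination directory are illustrative) of checking out one module pinned by such a manifest with the helper above:

# Hypothetical sketch: clone gstreamer at the revision pinned in manifest.xml.
import subprocess

subprocess.check_call([
    'python3', 'ci/gitlab/clone_manifest_ref.py',
    '--manifest', 'manifest.xml',
    '--project', 'gstreamer',
    '--destination', 'gstreamer',
])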
39
ci/gitlab/freedesktop_doc_importer.py
Executable file
@@ -0,0 +1,39 @@
#!/usr/bin/python3
import os
import gitlab
from datetime import datetime
import tempfile
from subprocess import check_call, call, check_output

BRANCH="main"
NAMESPACE="gstreamer"
JOB="documentation"
DOC_BASE="/srv/gstreamer.freedesktop.org/public_html/documentation"

print(f"Running at {datetime.now()}")
with tempfile.TemporaryDirectory() as tmpdir:
    os.chdir(tmpdir)

    gl = gitlab.Gitlab("https://gitlab.freedesktop.org/")
    project = gl.projects.get(1357)
    pipelines = project.pipelines.list()
    for pipeline in pipelines:
        if pipeline.ref != BRANCH:
            continue

        job, = [j for j in pipeline.jobs.list() if j.name == "documentation"]
        if job.status != "success":
            continue

        url = f"https://gitlab.freedesktop.org/gstreamer/gstreamer/-/jobs/{job.id}/artifacts/download"
        print("============================================================================================================================")
        print(f"Updating documentation from: {url}\n\n")
        check_call(f"wget {url} -O gstdocs.zip", shell=True)
        print("Unzipping file.")
        check_output("unzip gstdocs.zip", shell=True)
        print("Running rsync.")
        call(f"rsync -rvaz --links --delete documentation/ {DOC_BASE}", shell=True)
        call(f"chmod -R g+w {DOC_BASE}; chgrp -R gstreamer {DOC_BASE}", shell=True)

        print(f"Done updating doc")
        break
64
ci/gitlab/trigger_cerbero_pipeline.py
Executable file
@@ -0,0 +1,64 @@
#!/usr/bin/python3

import time
import os
import sys
import gitlab

CERBERO_PROJECT = 'gstreamer/cerbero'


class Status:
    FAILED = 'failed'
    MANUAL = 'manual'
    CANCELED = 'canceled'
    SUCCESS = 'success'
    SKIPPED = 'skipped'
    CREATED = 'created'

    @classmethod
    def is_finished(cls, state):
        return state in [
            cls.FAILED,
            cls.MANUAL,
            cls.CANCELED,
            cls.SUCCESS,
            cls.SKIPPED,
        ]


def fprint(msg):
    print(msg, end="")
    sys.stdout.flush()


if __name__ == "__main__":
    server = os.environ['CI_SERVER_URL']
    gl = gitlab.Gitlab(server,
                       private_token=os.environ.get('GITLAB_API_TOKEN'),
                       job_token=os.environ.get('CI_JOB_TOKEN'))

    cerbero = gl.projects.get(CERBERO_PROJECT)
    pipe = cerbero.trigger_pipeline(
        token=os.environ['CI_JOB_TOKEN'],
        ref=os.environ["GST_UPSTREAM_BRANCH"],
        variables={
            "CI_GSTREAMER_URL": os.environ["CI_PROJECT_URL"],
            "CI_GSTREAMER_REF_NAME": os.environ["CI_COMMIT_REF_NAME"],
            # This tells cerbero CI that this is a pipeline started via the
            # trigger API, which means it can use a deps cache instead of
            # building from scratch.
            "CI_GSTREAMER_TRIGGERED": "true",
        }
    )

    fprint(f'Cerbero pipeline running at {pipe.web_url} ')
    while True:
        time.sleep(15)
        pipe.refresh()
        if Status.is_finished(pipe.status):
            fprint(f": {pipe.status}\n")
            sys.exit(0 if pipe.status == Status.SUCCESS else 1)
        else:
            fprint(".")
23
ci/meson/vs2019-arm64-cross-file.txt
Normal file
@@ -0,0 +1,23 @@
[constants]
vs_path = 'C:\BuildTools'
msvc_version = '14.29.30133'
msvc_version_dir = vs_path / 'VC\Tools\MSVC' / msvc_version
msvc_arm64_bindir = msvc_version_dir / 'bin\Hostx64\arm64'
msvc_arm64_libdir = msvc_version_dir / 'lib\arm64'

[host_machine]
system = 'windows'
cpu_family = 'aarch64'
cpu = 'aarch64'
endian = 'little'

[properties]
needs_exe_wrapper = true

[binaries]
lib = msvc_arm64_bindir / 'lib.exe'
c = msvc_arm64_bindir / 'cl.exe'
c_ld = msvc_arm64_bindir / 'link.exe'
cpp = msvc_arm64_bindir / 'cl.exe'
cpp_ld = msvc_arm64_bindir / 'link.exe'
pkgconfig = 'false'
38
ci/meson/vs2019-x64-native-file.txt
Normal file
@@ -0,0 +1,38 @@
[constants]
vs_path = 'C:\BuildTools'
msvc_version = '14.29.30133'
msvc_version_dir = vs_path / 'VC\Tools\MSVC' / msvc_version
msvc_x64_bindir = msvc_version_dir / 'bin\Hostx64\x64'
msvc_x64_libdir = msvc_version_dir / 'lib\x64'
wk_path = 'C:\Program Files (x86)\Windows Kits'
wk_version = '10.0.19041.0'
wk_x64_libdir = wk_path / '10\lib' / wk_version / 'um\x64'
wk_x64_crt_libdir = wk_path / '10\lib' / wk_version / 'ucrt\x64'
# Forcibly link to x64 libs when using native linker, otherwise the LIB
# variable in the env will cause link.exe to pick up libs from the cross
# msvc libdir. A better fix might be to use a wrapper script that calls
# link.exe inside the correct VS environment for x64.
msvc_x64_libs = [msvc_x64_libdir / 'msvcrt.lib', msvc_x64_libdir / 'msvcrtd.lib', msvc_x64_libdir / 'vcruntime.lib', msvc_x64_libdir / 'oldnames.lib', wk_x64_crt_libdir / 'ucrt.lib']

[host_machine]
system = 'windows'
cpu_family = 'x86_64'
cpu = 'x86_64'
endian = 'little'

[properties]
needs_exe_wrapper = true

[built-in options]
# Ensure that x64 libs are used for linking even when we're inside, say, an
# arm64 VS environment
c_link_args = ['/LIBPATH:' + wk_x64_libdir] + msvc_x64_libs
cpp_link_args = ['/LIBPATH:' + wk_x64_libdir] + msvc_x64_libs

[binaries]
lib = msvc_x64_bindir / 'lib.exe'
c = msvc_x64_bindir / 'cl.exe'
c_ld = msvc_x64_bindir / 'link.exe'
cpp = msvc_x64_bindir / 'cl.exe'
cpp_ld = msvc_x64_bindir / 'link.exe'
pkgconfig = 'false'
20
ci/scripts/check-documentation-diff.py
Executable file
@@ -0,0 +1,20 @@
#!/usr/bin/python3
import os, subprocess, sys

if __name__ == "__main__":
    diffsdir = 'plugins-cache-diffs'
    os.makedirs(diffsdir, exist_ok=True)
    res = 0
    try:
        subprocess.check_call(['git', 'diff', '--quiet'])
    except subprocess.CalledProcessError:
        diffname = os.path.join(diffsdir, 'plugins_cache.diff')
        res += 1
        with open(diffname, 'w') as diff:
            subprocess.check_call(['git', 'diff'], stdout=diff)
        print('\033[91mYou have a diff in the documentation cache. Please update with:\033[0m')
        print('  $ curl %s/%s | git apply -' % (os.environ['CI_ARTIFACTS_URL'], diffname.replace('../', '')))

    if res != 0:
        print('(note that it might take a few minutes for artefacts to be available on the server)\n')
    sys.exit(res)
80
ci/scripts/handle-subprojects-cache.py
Executable file
@@ -0,0 +1,80 @@
#!/usr/bin/env python3

"""
Copies current subproject git repository to create a cache
"""

import shutil
import os
import sys
import argparse

DEST = "/subprojects"
PARSER = argparse.ArgumentParser()
PARSER.add_argument('subprojects_dir')
PARSER.add_argument('--build', action="store_true", default=False)


def create_cache_in_image(options):
    os.makedirs(DEST, exist_ok=True)
    print("Creating cache from %s" % options.subprojects_dir)
    for project_name in os.listdir(options.subprojects_dir):
        project_path = os.path.join(options.subprojects_dir, project_name)

        if project_name != "packagecache" and not os.path.exists(os.path.join(project_path, '.git')):
            continue

        if os.path.exists(os.path.join(DEST, project_name)):
            continue

        print("Copying %s" % project_name)
        shutil.copytree(project_path, os.path.join(DEST, project_name))

    media_path = os.path.join(options.subprojects_dir, '..', '.git',
                              'modules', 'subprojects', 'gst-integration-testsuites', 'medias')
    if os.path.exists(os.path.join(DEST, 'medias.git')):
        return

    if os.path.exists(media_path):
        print("Creating media cache")
        shutil.copytree(media_path, os.path.join(DEST, 'medias.git'))
    else:
        print("Did not find medias in %s" % media_path)


def copy_cache(options):
    # FIXME Remove when not needed anymore.
    for path in [DEST, "/gst-build/subprojects", r"C:\gst-build\subprojects"]:
        if not os.path.exists(path):
            print("%s doesn't exist." % path)
            continue

        for project_name in os.listdir(path):
            project_path = os.path.join(options.subprojects_dir, project_name)
            cache_dir = os.path.join(path, project_name)

            if project_name == 'medias.git':
                project_path = os.path.join(options.subprojects_dir, '..', '.git', 'modules',
                                            'subprojects', 'gst-integration-testsuites')
                os.makedirs(project_path, exist_ok=True)
                project_path = os.path.join(project_path, 'medias')

            if os.path.exists(project_path):
                print("- Ignoring %s" % cache_dir)
                continue

            if not os.path.isdir(cache_dir):
                print("- Ignoring %s" % cache_dir)
                continue

            print("Copying from %s -> %s" % (cache_dir, project_path))
            shutil.copytree(cache_dir, project_path)


if __name__ == "__main__":
    options = PARSER.parse_args()

    if options.build:
        create_cache_in_image(options)
    else:
        copy_cache(options)
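A minimal sketch (paths are illustrative) of the two modes of the cache helper above: `--build` snapshots subproject checkouts into /subprojects while the image is built, and the default mode copies that cache back into a fresh checkout:

# Hypothetical usage sketch of ci/scripts/handle-subprojects-cache.py.
import subprocess

# In the Dockerfile, after cloning: populate the /subprojects cache.
subprocess.check_call(['python3', 'ci/scripts/handle-subprojects-cache.py', '--build', 'subprojects/'])

# In a CI job, before building: seed the fresh checkout from the image cache.
subprocess.check_call(['python3', 'ci/scripts/handle-subprojects-cache.py', 'subprojects/'])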
39
data/cross-files/README.md
Normal file
@@ -0,0 +1,39 @@
# Cross compiling GStreamer with gst-build

GStreamer can be cross compiled for various platforms using gst-build. However,
only dependencies that are ported to the Meson build system will be built. It is
recommended to use Cerbero to cross compile GStreamer when other external
dependencies are required.

Once the toolchain is installed and a Meson cross file is created, to build
GStreamer simply run, for example: `meson --cross-file data/cross-files/mingw_w64_x86-64.txt builddir`.

## Android

Requires Android API level >= 28; previous versions are missing the *iconv* dependency.

- Download and extract the [NDK](https://developer.android.com/ndk/)
- Create a Meson cross file; you can use `android_arm64_api28.txt` as an example
  and change the CPU architectures and toolchain path using the prebuilt toolchains
  from the NDK.

Notes:
- On Fedora the Android NDK requires the `ncurses-compat-libs` package.

## Windows

GStreamer can be cross compiled for Windows using mingw, packaged in most
distributions.

The Meson cross file `mingw_w64_x86-64.txt` can be used when targeting the amd64
architecture, or adapted for the i686 arch.

### Fedora

- Install the toolchain packages: `mingw64-gcc`, `mingw64-gcc-c++`. Fedora
  provides many other optional dependencies that could be installed as well.
  For example: `mingw64-gettext`, `mingw64-libffi`, `mingw64-zlib`.

### Ubuntu

- Install the toolchain package: `gcc-mingw-w64`.
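As a quick illustration of the workflow described in the README above (the build directory name is arbitrary, and the Meson/ninja invocation is the standard one rather than anything specific to this commit):

# Hypothetical sketch: configure and build a mingw cross build with the file below.
import subprocess

subprocess.check_call(['meson', '--cross-file', 'data/cross-files/mingw_w64_x86-64.txt', 'builddir'])
subprocess.check_call(['ninja', '-C', 'builddir'])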
20
data/cross-files/android_arm64_api28.txt
Normal file
@@ -0,0 +1,20 @@
[host_machine]
system = 'android'
cpu_family = 'aarch64'
cpu = 'aarch64'
endian = 'little'

[properties]
sys_root = '/path/to/android-ndk-r21/sysroot'
c_link_args = ['-fuse-ld=gold']
cpp_link_args = ['-fuse-ld=gold']
# Starting with 0.53.1, you can replace the above *_link_args:
# c_ld = 'gold'
# cpp_ld = 'gold'

[binaries]
c = '/path/to/android-ndk-r21/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android28-clang'
cpp = '/path/to/android-ndk-r21/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android28-clang++'
ar = '/path/to/android-ndk-r21/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android-ar'
strip = '/path/to/android-ndk-r21/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android-strip'
pkgconfig = 'false'
17
data/cross-files/mingw_w64_x86-64.txt
Normal file
@@ -0,0 +1,17 @@
[host_machine]
system = 'windows'
cpu_family = 'x86_64'
cpu = 'x86_64'
endian = 'little'

[properties]
c_args = []
c_link_args = []

[binaries]
c = 'x86_64-w64-mingw32-gcc'
cpp = 'x86_64-w64-mingw32-g++'
ar = 'x86_64-w64-mingw32-ar'
strip = 'x86_64-w64-mingw32-strip'
pkgconfig = 'x86_64-w64-mingw32-pkg-config'
windres = 'x86_64-w64-mingw32-windres'
BIN
data/images/git-installer-PATH.png
Normal file (binary image, 19 KiB)
BIN
data/images/py-installer-page1.png
Normal file (binary image, 76 KiB)
BIN
data/images/vs-2019-dev-prompt.png
Normal file (binary image, 50 KiB)
19
data/misc/cmd_or_ps.ps1
Normal file
@@ -0,0 +1,19 @@
$i=1
$ppid=(gwmi win32_process -Filter "processid='$pid'").parentprocessid
$pname=(Get-Process -id $ppid).Name
While($true) {
    if($pname -eq "cmd" -Or $pname -eq "powershell") {
        Write-Host ("{0}.exe" -f $pname)
        Break
    }

    # 10 iterations seems to be sufficient
    if($i -gt 10) {
        Break
    }

    # not found yet, check the grandparent
    $ppid=(gwmi win32_process -Filter "processid='$ppid'").parentprocessid
    $pname=(Get-Process -id $ppid).Name
    $i++
}
12
data/misc/gstreamer-full-default.map
Normal file
@@ -0,0 +1,12 @@
{
global:
  ges_*;
  GES_*;
  GST_*;
  gst_*;
  _gst_*;
  g_*;
  glib_*;
local:
  *;
};
618
gst-env.py
Executable file
618
gst-env.py
Executable file
@ -0,0 +1,618 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import pathlib
|
||||
import signal
|
||||
from functools import lru_cache
|
||||
from pathlib import PurePath, Path
|
||||
|
||||
from typing import Any
|
||||
|
||||
from scripts.common import get_meson
|
||||
from scripts.common import git
|
||||
from scripts.common import win32_get_short_path_name
|
||||
from scripts.common import get_wine_shortpath
|
||||
|
||||
SCRIPTDIR = os.path.dirname(os.path.realpath(__file__))
|
||||
PREFIX_DIR = os.path.join(SCRIPTDIR, 'prefix')
|
||||
# Look for the following build dirs: `build` `_build` `builddir`
|
||||
DEFAULT_BUILDDIR = os.path.join(SCRIPTDIR, 'build')
|
||||
if not os.path.exists(DEFAULT_BUILDDIR):
|
||||
DEFAULT_BUILDDIR = os.path.join(SCRIPTDIR, '_build')
|
||||
if not os.path.exists(DEFAULT_BUILDDIR):
|
||||
DEFAULT_BUILDDIR = os.path.join(SCRIPTDIR, 'builddir')
|
||||
|
||||
TYPELIB_REG = re.compile(r'.*\.typelib$')
|
||||
SHAREDLIB_REG = re.compile(r'\.so|\.dylib|\.dll')
|
||||
|
||||
# libdir is expanded from option of the same name listed in the `meson
|
||||
# introspect --buildoptions` output.
|
||||
GSTPLUGIN_FILEPATH_REG_TEMPLATE = r'.*/{libdir}/gstreamer-1.0/[^/]+$'
|
||||
GSTPLUGIN_FILEPATH_REG = None
|
||||
|
||||
BC_RC = '''
|
||||
BASH_COMPLETION_SCRIPTS="{bash_completions}"
|
||||
BASH_COMPLETION_PATHS="{bash_completions_paths}"
|
||||
for p in $BASH_COMPLETION_PATHS; do
|
||||
for f in $BASH_COMPLETION_SCRIPTS; do
|
||||
[ -f "$p/$f" ] && . "$p/$f"
|
||||
done
|
||||
done
|
||||
'''
|
||||
BASH_COMPLETION_PATHS = [SCRIPTDIR + '/subprojects/gstreamer/data/bash-completion/completions']
|
||||
BASH_COMPLETION_PATHS += [SCRIPTDIR + '/subprojects/gst-devtools/validate/data/bash-completion/completions']
|
||||
|
||||
|
||||
def str_to_bool(value: Any) -> bool:
|
||||
"""Return whether the provided string (or any value really) represents true. Otherwise false.
|
||||
Just like plugin server stringToBoolean.
|
||||
"""
|
||||
if not value:
|
||||
return False
|
||||
return str(value).lower() in ("y", "yes", "t", "true", "on", "1")
|
||||
|
||||
|
||||
def listify(o):
|
||||
if isinstance(o, str):
|
||||
return [o]
|
||||
if isinstance(o, list):
|
||||
return o
|
||||
raise AssertionError('Object {!r} must be a string or a list'.format(o))
|
||||
|
||||
|
||||
def stringify(o):
|
||||
if isinstance(o, str):
|
||||
return o
|
||||
if isinstance(o, list):
|
||||
if len(o) == 1:
|
||||
return o[0]
|
||||
raise AssertionError('Did not expect object {!r} to have more than one element'.format(o))
|
||||
raise AssertionError('Object {!r} must be a string or a list'.format(o))
|
||||
|
||||
|
||||
def prepend_env_var(env, var, value, sysroot):
|
||||
if var is None:
|
||||
return
|
||||
if value.startswith(sysroot):
|
||||
value = value[len(sysroot):]
|
||||
# Try not to exceed maximum length limits for env vars on Windows
|
||||
if os.name == 'nt':
|
||||
value = win32_get_short_path_name(value)
|
||||
env_val = env.get(var, '')
|
||||
val = os.pathsep + value + os.pathsep
|
||||
# Don't add the same value twice
|
||||
if val in env_val or env_val.startswith(value + os.pathsep):
|
||||
return
|
||||
env[var] = val + env_val
|
||||
env[var] = env[var].replace(os.pathsep + os.pathsep, os.pathsep).strip(os.pathsep)
|
||||
|
||||
|
||||
def get_target_install_filename(target, filename):
|
||||
'''
|
||||
Checks whether this file is one of the files installed by the target
|
||||
'''
|
||||
basename = os.path.basename(filename)
|
||||
for install_filename in listify(target['install_filename']):
|
||||
if install_filename.endswith(basename):
|
||||
return install_filename
|
||||
return None
|
||||
|
||||
|
||||
def get_pkgconfig_variable_from_pcfile(pcfile, varname):
|
||||
variables = {}
|
||||
substre = re.compile('\$\{[^${}]+\}')
|
||||
with pcfile.open('r', encoding='utf-8') as f:
|
||||
for line in f:
|
||||
if '=' not in line:
|
||||
continue
|
||||
key, value = line[:-1].split('=', 1)
|
||||
subst = {}
|
||||
for each in substre.findall(value):
|
||||
substkey = each[2:-1]
|
||||
subst[each] = variables.get(substkey, '')
|
||||
for k, v in subst.items():
|
||||
value = value.replace(k, v)
|
||||
variables[key] = value
|
||||
return variables.get(varname, '')
|
||||
|
||||
|
||||
@lru_cache()
|
||||
def get_pkgconfig_variable(builddir, pcname, varname):
|
||||
'''
|
||||
Parsing isn't perfect, but it's good enough.
|
||||
'''
|
||||
pcfile = Path(builddir) / 'meson-private' / (pcname + '.pc')
|
||||
if pcfile.is_file():
|
||||
return get_pkgconfig_variable_from_pcfile(pcfile, varname)
|
||||
return subprocess.check_output(['pkg-config', pcname, '--variable=' + varname],
|
||||
universal_newlines=True, encoding='utf-8')
|
||||
|
||||
|
||||
def is_gio_module(target, filename, builddir):
|
||||
if target['type'] != 'shared module':
|
||||
return False
|
||||
install_filename = get_target_install_filename(target, filename)
|
||||
if not install_filename:
|
||||
return False
|
||||
giomoduledir = PurePath(get_pkgconfig_variable(builddir, 'gio-2.0', 'giomoduledir'))
|
||||
fpath = PurePath(install_filename)
|
||||
if fpath.parent != giomoduledir:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def is_library_target_and_not_plugin(target, filename):
|
||||
'''
|
||||
Don't add plugins to PATH/LD_LIBRARY_PATH because:
|
||||
1. We don't need to
|
||||
2. It causes us to exceed the PATH length limit on Windows and Wine
|
||||
'''
|
||||
if target['type'] != 'shared library':
|
||||
return False
|
||||
# Check if this output of that target is a shared library
|
||||
if not SHAREDLIB_REG.search(filename):
|
||||
return False
|
||||
# Check if it's installed to the gstreamer plugin location
|
||||
install_filename = get_target_install_filename(target, filename)
|
||||
if not install_filename:
|
||||
return False
|
||||
global GSTPLUGIN_FILEPATH_REG
|
||||
if GSTPLUGIN_FILEPATH_REG is None:
|
||||
GSTPLUGIN_FILEPATH_REG = re.compile(GSTPLUGIN_FILEPATH_REG_TEMPLATE)
|
||||
if GSTPLUGIN_FILEPATH_REG.search(install_filename.replace('\\', '/')):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def is_binary_target_and_in_path(target, filename, bindir):
|
||||
if target['type'] != 'executable':
|
||||
return False
|
||||
# Check if this file installed by this target is installed to bindir
|
||||
install_filename = get_target_install_filename(target, filename)
|
||||
if not install_filename:
|
||||
return False
|
||||
fpath = PurePath(install_filename)
|
||||
if fpath.parent != bindir:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def get_wine_subprocess_env(options, env):
|
||||
with open(os.path.join(options.builddir, 'meson-info', 'intro-buildoptions.json')) as f:
|
||||
buildoptions = json.load(f)
|
||||
|
||||
prefix, = [o for o in buildoptions if o['name'] == 'prefix']
|
||||
path = os.path.normpath(os.path.join(prefix['value'], 'bin'))
|
||||
prepend_env_var(env, "PATH", path, options.sysroot)
|
||||
wine_path = get_wine_shortpath(
|
||||
options.wine.split(' '),
|
||||
[path] + env.get('WINEPATH', '').split(';')
|
||||
)
|
||||
if options.winepath:
|
||||
wine_path += ';' + options.winepath
|
||||
env['WINEPATH'] = wine_path
|
||||
env['WINEDEBUG'] = 'fixme-all'
|
||||
|
||||
return env
|
||||
|
||||
|
||||
def setup_gdb(options):
|
||||
python_paths = set()
|
||||
|
||||
if not shutil.which('gdb'):
|
||||
return python_paths
|
||||
|
||||
bdir = pathlib.Path(options.builddir).resolve()
|
||||
for libpath, gdb_path in [
|
||||
(os.path.join("subprojects", "gstreamer", "gst"),
|
||||
os.path.join("subprojects", "gstreamer", "libs", "gst", "helpers")),
|
||||
(os.path.join("subprojects", "glib", "gobject"), None),
|
||||
(os.path.join("subprojects", "glib", "glib"), None)]:
|
||||
|
||||
if not gdb_path:
|
||||
gdb_path = libpath
|
||||
|
||||
autoload_path = (pathlib.Path(bdir) / 'gdb-auto-load').joinpath(*bdir.parts[1:]) / libpath
|
||||
autoload_path.mkdir(parents=True, exist_ok=True)
|
||||
for gdb_helper in glob.glob(str(bdir / gdb_path / "*-gdb.py")):
|
||||
python_paths.add(str(bdir / gdb_path))
|
||||
python_paths.add(os.path.join(options.srcdir, gdb_path))
|
||||
try:
|
||||
if os.name == 'nt':
|
||||
shutil.copy(gdb_helper, str(autoload_path / os.path.basename(gdb_helper)))
|
||||
else:
|
||||
os.symlink(gdb_helper, str(autoload_path / os.path.basename(gdb_helper)))
|
||||
except (FileExistsError, shutil.SameFileError):
|
||||
pass
|
||||
|
||||
gdbinit_line = 'add-auto-load-scripts-directory {}\n'.format(bdir / 'gdb-auto-load')
|
||||
try:
|
||||
with open(os.path.join(options.srcdir, '.gdbinit'), 'r') as f:
|
||||
if gdbinit_line in f.readlines():
|
||||
return python_paths
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
with open(os.path.join(options.srcdir, '.gdbinit'), 'a') as f:
|
||||
f.write(gdbinit_line)
|
||||
|
||||
return python_paths
|
||||
|
||||
|
||||
def is_bash_completion_available(options):
|
||||
return os.path.exists(os.path.join(options.builddir, 'subprojects/gstreamer/data/bash-completion/helpers/gst'))
|
||||
|
||||
|
||||
def get_subprocess_env(options, gst_version):
|
||||
env = os.environ.copy()
|
||||
|
||||
env["CURRENT_GST"] = os.path.normpath(SCRIPTDIR)
|
||||
env["GST_VERSION"] = gst_version
|
||||
prepend_env_var(env, "GST_VALIDATE_SCENARIOS_PATH", os.path.normpath(
|
||||
"%s/subprojects/gst-devtools/validate/data/scenarios" % SCRIPTDIR),
|
||||
options.sysroot)
|
||||
env["GST_VALIDATE_PLUGIN_PATH"] = os.path.normpath(
|
||||
"%s/subprojects/gst-devtools/validate/plugins" % options.builddir)
|
||||
prepend_env_var(env, "GST_VALIDATE_APPS_DIR", os.path.normpath(
|
||||
"%s/subprojects/gst-editing-services/tests/validate" % SCRIPTDIR),
|
||||
options.sysroot)
|
||||
env["GST_ENV"] = 'gst-' + gst_version
|
||||
env["GST_REGISTRY"] = os.path.normpath(options.builddir + "/registry.dat")
|
||||
prepend_env_var(env, "PATH", os.path.normpath(
|
||||
"%s/subprojects/gst-devtools/validate/tools" % options.builddir),
|
||||
options.sysroot)
|
||||
|
||||
prepend_env_var(env, "GST_VALIDATE_SCENARIOS_PATH", os.path.normpath(
|
||||
"%s/subprojects/gst-examples/webrtc/check/validate/scenarios" %
|
||||
SCRIPTDIR), options.sysroot)
|
||||
prepend_env_var(env, "GST_VALIDATE_APPS_DIR", os.path.normpath(
|
||||
"%s/subprojects/gst-examples/webrtc/check/validate/apps" %
|
||||
SCRIPTDIR), options.sysroot)
|
||||
|
||||
if options.wine:
|
||||
return get_wine_subprocess_env(options, env)
|
||||
|
||||
prepend_env_var(env, "PATH", os.path.join(SCRIPTDIR, 'meson'),
|
||||
options.sysroot)
|
||||
|
||||
env["GST_PLUGIN_SYSTEM_PATH"] = ""
|
||||
env["GST_PLUGIN_SCANNER"] = os.path.normpath(
|
||||
"%s/subprojects/gstreamer/libs/gst/helpers/gst-plugin-scanner" % options.builddir)
|
||||
env["GST_PTP_HELPER"] = os.path.normpath(
|
||||
"%s/subprojects/gstreamer/libs/gst/helpers/gst-ptp-helper" % options.builddir)
|
||||
|
||||
if os.name == 'nt':
|
||||
lib_path_envvar = 'PATH'
|
||||
elif platform.system() == 'Darwin':
|
||||
# RPATH is sufficient on macOS, and DYLD_LIBRARY_PATH can cause issues with dynamic linker path priority
|
||||
lib_path_envvar = None
|
||||
else:
|
||||
lib_path_envvar = 'LD_LIBRARY_PATH'
|
||||
|
||||
prepend_env_var(env, "GST_PLUGIN_PATH", os.path.join(SCRIPTDIR, 'subprojects',
|
||||
'gst-python', 'plugin'),
|
||||
options.sysroot)
|
||||
prepend_env_var(env, "GST_PLUGIN_PATH", os.path.join(PREFIX_DIR, 'lib',
|
||||
'gstreamer-1.0'),
|
||||
options.sysroot)
|
||||
prepend_env_var(env, "GST_PLUGIN_PATH", os.path.join(options.builddir, 'subprojects',
|
||||
'libnice', 'gst'),
|
||||
options.sysroot)
|
||||
prepend_env_var(env, "GST_VALIDATE_SCENARIOS_PATH",
|
||||
os.path.join(PREFIX_DIR, 'share', 'gstreamer-1.0',
|
||||
'validate', 'scenarios'),
|
||||
options.sysroot)
|
||||
prepend_env_var(env, "GI_TYPELIB_PATH", os.path.join(PREFIX_DIR, 'lib',
|
||||
'lib', 'girepository-1.0'),
|
||||
options.sysroot)
|
||||
prepend_env_var(env, "PKG_CONFIG_PATH", os.path.join(PREFIX_DIR, 'lib', 'pkgconfig'),
|
||||
options.sysroot)
|
||||
|
||||
# gst-indent
|
||||
prepend_env_var(env, "PATH", os.path.join(SCRIPTDIR, 'scripts'),
|
||||
options.sysroot)
|
||||
|
||||
# tools: gst-launch-1.0, gst-inspect-1.0
|
||||
prepend_env_var(env, "PATH", os.path.join(options.builddir, 'subprojects',
|
||||
'gstreamer', 'tools'),
|
||||
options.sysroot)
|
||||
# plugin scanner and generator
|
||||
prepend_env_var(env, "PATH", os.path.join(options.builddir, 'subprojects',
|
||||
'gstreamer', 'docs'),
|
||||
options.sysroot)
|
||||
prepend_env_var(env, "PATH", os.path.join(options.builddir, 'subprojects',
|
||||
'gst-plugins-base', 'tools'),
|
||||
options.sysroot)
|
||||
|
||||
# Library and binary search paths
|
||||
prepend_env_var(env, "PATH", os.path.join(PREFIX_DIR, 'bin'),
|
||||
options.sysroot)
|
||||
if lib_path_envvar != 'PATH':
|
||||
prepend_env_var(env, lib_path_envvar, os.path.join(PREFIX_DIR, 'lib'),
|
||||
options.sysroot)
|
||||
prepend_env_var(env, lib_path_envvar, os.path.join(PREFIX_DIR, 'lib64'),
|
||||
options.sysroot)
|
||||
elif 'QMAKE' in os.environ:
|
||||
# There's no RPATH on Windows, so we need to set PATH for the qt5 DLLs
|
||||
prepend_env_var(env, 'PATH', os.path.dirname(os.environ['QMAKE']),
|
||||
options.sysroot)
|
||||
|
||||
meson = get_meson()
|
||||
targets_s = subprocess.check_output(meson + ['introspect', options.builddir, '--targets'])
|
||||
targets = json.loads(targets_s.decode())
|
||||
paths = set()
|
||||
mono_paths = set()
|
||||
srcdir_path = pathlib.Path(options.srcdir)
|
||||
|
||||
build_options_s = subprocess.check_output(meson + ['introspect', options.builddir, '--buildoptions'])
|
||||
build_options = json.loads(build_options_s.decode())
|
||||
libdir, = [o['value'] for o in build_options if o['name'] == 'libdir']
|
||||
libdir = PurePath(libdir)
|
||||
prefix, = [o['value'] for o in build_options if o['name'] == 'prefix']
|
||||
bindir, = [o['value'] for o in build_options if o['name'] == 'bindir']
|
||||
prefix = PurePath(prefix)
|
||||
bindir = prefix / bindir
|
||||
|
||||
global GSTPLUGIN_FILEPATH_REG_TEMPLATE
|
||||
GSTPLUGIN_FILEPATH_REG_TEMPLATE = GSTPLUGIN_FILEPATH_REG_TEMPLATE.format(libdir=libdir.as_posix())
|
||||
|
||||
for target in targets:
|
||||
filenames = listify(target['filename'])
|
||||
if not target['installed']:
|
||||
continue
|
||||
for filename in filenames:
|
||||
root = os.path.dirname(filename)
|
||||
if srcdir_path / "subprojects/gst-devtools/validate/plugins" in (srcdir_path / root).parents:
|
||||
continue
|
||||
if filename.endswith('.dll'):
|
||||
mono_paths.add(os.path.join(options.builddir, root))
|
||||
if TYPELIB_REG.search(filename):
|
||||
prepend_env_var(env, "GI_TYPELIB_PATH",
|
||||
os.path.join(options.builddir, root),
|
||||
options.sysroot)
|
||||
elif is_library_target_and_not_plugin(target, filename):
|
||||
prepend_env_var(env, lib_path_envvar,
|
||||
os.path.join(options.builddir, root),
|
||||
options.sysroot)
|
||||
elif is_binary_target_and_in_path(target, filename, bindir):
|
||||
paths.add(os.path.join(options.builddir, root))
|
||||
elif is_gio_module(target, filename, options.builddir):
|
||||
prepend_env_var(env, 'GIO_EXTRA_MODULES',
|
||||
os.path.join(options.builddir, root),
|
||||
options.sysroot)
|
||||
|
||||
# Search for the Plugin paths file either in the build directory root
|
||||
# or check if gstreamer is a subproject of another project
|
||||
for sub_directories in [[], ['subprojects', 'gstreamer']]:
|
||||
plugin_paths = os.path.join(options.builddir, *sub_directories, 'GstPluginsPath.json')
|
||||
if os.path.exists(plugin_paths):
|
||||
with open(plugin_paths) as f:
|
||||
for plugin_path in json.load(f):
|
||||
prepend_env_var(env, 'GST_PLUGIN_PATH', plugin_path,
|
||||
options.sysroot)
|
||||
break
|
||||
|
||||
# Sort to iterate in a consistent order (`set`s and `hash`es are randomized)
|
||||
for p in sorted(paths):
|
||||
prepend_env_var(env, 'PATH', p, options.sysroot)
|
||||
|
||||
if os.name != 'nt':
|
||||
for p in sorted(mono_paths):
|
||||
prepend_env_var(env, "MONO_PATH", p, options.sysroot)
|
||||
|
||||
presets = set()
|
||||
encoding_targets = set()
|
||||
python_dirs = setup_gdb(options)
|
||||
overrides_dirs = set()
|
||||
if '--installed' in subprocess.check_output(meson + ['introspect', '-h']).decode():
|
||||
installed_s = subprocess.check_output(meson + ['introspect', options.builddir, '--installed'])
|
||||
for path, installpath in json.loads(installed_s.decode()).items():
|
||||
installpath_parts = pathlib.Path(installpath).parts
|
||||
|
||||
# We want to add all python modules to the PYTHONPATH
|
||||
# in a manner consistent with the way they would be imported:
|
||||
# For example if the source path /home/meh/foo/bar.py
|
||||
# is to be installed in /usr/lib/python/site-packages/foo/bar.py,
|
||||
# we want to add /home/meh to the PYTHONPATH.
|
||||
# This will only work for projects where the paths to be installed
|
||||
# mirror the installed directory layout, for example if the path
|
||||
# is /home/meh/baz/bar.py and the install path is
|
||||
# /usr/lib/site-packages/foo/bar.py , we will not add anything
|
||||
# to PYTHONPATH, but the current approach works with pygobject
|
||||
# and gst-python at least.
|
||||
if 'site-packages' in installpath_parts:
|
||||
install_subpath = os.path.join(*installpath_parts[installpath_parts.index('site-packages') + 1:])
|
||||
if path.endswith(install_subpath):
|
||||
if os.path.commonprefix(["gi/overrides", install_subpath]):
|
||||
overrides_dirs.add(os.path.dirname(path))
|
||||
else:
|
||||
python_dirs.add(path[:len(install_subpath) * -1])
|
||||
|
||||
if path.endswith('.prs'):
|
||||
presets.add(os.path.dirname(path))
|
||||
elif path.endswith('.gep'):
|
||||
encoding_targets.add(
|
||||
os.path.abspath(os.path.join(os.path.dirname(path), '..')))
|
||||
|
||||
if path.endswith('gstomx.conf'):
|
||||
prepend_env_var(env, 'GST_OMX_CONFIG_DIR', os.path.dirname(path),
|
||||
options.sysroot)
|
||||
|
||||
for p in sorted(presets):
|
||||
prepend_env_var(env, 'GST_PRESET_PATH', p, options.sysroot)
|
||||
|
||||
for t in sorted(encoding_targets):
|
||||
prepend_env_var(env, 'GST_ENCODING_TARGET_PATH', t, options.sysroot)
|
||||
|
||||
# Check if meson has generated -uninstalled pkgconfig files
|
||||
meson_uninstalled = pathlib.Path(options.builddir) / 'meson-uninstalled'
|
||||
if meson_uninstalled.is_dir():
|
||||
prepend_env_var(env, 'PKG_CONFIG_PATH', str(meson_uninstalled), options.sysroot)
|
||||
|
||||
for python_dir in sorted(python_dirs):
|
||||
prepend_env_var(env, 'PYTHONPATH', python_dir, options.sysroot)
|
||||
|
||||
for python_dir in sorted(overrides_dirs):
|
||||
prepend_env_var(env, '_GI_OVERRIDES_PATH', python_dir, options.sysroot)
|
||||
|
||||
mesonpath = os.path.join(SCRIPTDIR, "meson")
|
||||
if os.path.join(mesonpath):
|
||||
# Add meson/ into PYTHONPATH if we are using a local meson
|
||||
prepend_env_var(env, 'PYTHONPATH', mesonpath, options.sysroot)
|
||||
|
||||
# Ensure that gst-python/gi is used first
|
||||
prepend_env_var(env, "PYTHONPATH", os.path.join(SCRIPTDIR, 'subprojects', 'gst-python'),
|
||||
options.sysroot)
|
||||
|
||||
# For devhelp books
|
||||
if 'XDG_DATA_DIRS' not in env or not env['XDG_DATA_DIRS']:
|
||||
# Preserve default paths when empty
|
||||
prepend_env_var(env, 'XDG_DATA_DIRS', '/usr/local/share/:/usr/share/', '')
|
||||
|
||||
prepend_env_var(env, 'XDG_DATA_DIRS', os.path.join(options.builddir,
|
||||
'subprojects',
|
||||
'gst-docs',
|
||||
'GStreamer-doc'),
|
||||
options.sysroot)
|
||||
|
||||
if 'XDG_CONFIG_DIRS' not in env or not env['XDG_CONFIG_DIRS']:
|
||||
# Preserve default paths when empty
|
||||
prepend_env_var(env, 'XDG_CONFIG_DIRS', '/etc/local/xdg:/etc/xdg', '')
|
||||
|
||||
prepend_env_var(env, "XDG_CONFIG_DIRS", os.path.join(PREFIX_DIR, 'etc', 'xdg'),
|
||||
options.sysroot)
|
||||
|
||||
return env
|
||||
|
||||
|
||||
def get_windows_shell():
|
||||
command = ['powershell.exe', '-noprofile', '-executionpolicy', 'bypass', '-file',
|
||||
os.path.join(SCRIPTDIR, 'data', 'misc', 'cmd_or_ps.ps1')]
|
||||
result = subprocess.check_output(command)
|
||||
return result.decode().strip()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(prog="gst-env")
|
||||
|
||||
parser.add_argument("--builddir",
|
||||
default=DEFAULT_BUILDDIR,
|
||||
help="The meson build directory")
|
||||
parser.add_argument("--srcdir",
|
||||
default=SCRIPTDIR,
|
||||
help="The top level source directory")
|
||||
parser.add_argument("--sysroot",
|
||||
default='',
|
||||
help="The sysroot path used during cross-compilation")
|
||||
parser.add_argument("--wine",
|
||||
default='',
|
||||
help="Build a wine env based on specified wine command")
|
||||
parser.add_argument("--winepath",
|
||||
default='',
|
||||
help="Extra path to set to WINEPATH.")
|
||||
parser.add_argument("--only-environment",
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="Do not start a shell, only print required environment.")
|
||||
options, args = parser.parse_known_args()
|
||||
|
||||
if not os.path.exists(options.builddir):
|
||||
print("GStreamer not built in %s\n\nBuild it and try again" %
|
||||
options.builddir)
|
||||
exit(1)
|
||||
options.builddir = os.path.abspath(options.builddir)
|
||||
|
||||
if not os.path.exists(options.srcdir):
|
||||
print("The specified source dir does not exist" %
|
||||
options.srcdir)
|
||||
exit(1)
|
||||
|
||||
# The following incantation will retrieve the current branch name.
|
||||
try:
|
||||
gst_version = git("rev-parse", "--symbolic-full-name", "--abbrev-ref", "HEAD",
|
||||
repository_path=options.srcdir).strip('\n')
|
||||
except subprocess.CalledProcessError:
|
||||
gst_version = "unknown"
|
||||
|
||||
if options.wine:
|
||||
gst_version += '-' + os.path.basename(options.wine)
|
||||
|
||||
env = get_subprocess_env(options, gst_version)
|
||||
if not args:
|
||||
if os.name == 'nt':
|
||||
shell = get_windows_shell()
|
||||
if shell == 'powershell.exe':
|
||||
args = ['powershell.exe']
|
||||
args += ['-NoLogo', '-NoExit']
|
||||
prompt = 'function global:prompt { "[gst-' + gst_version + '"+"] PS " + $PWD + "> "}'
|
||||
args += ['-Command', prompt]
|
||||
else:
|
||||
args = [os.environ.get("COMSPEC", r"C:\WINDOWS\system32\cmd.exe")]
|
||||
args += ['/k', 'prompt [gst-{}] $P$G'.format(gst_version)]
|
||||
else:
|
||||
args = [os.environ.get("SHELL", os.path.realpath("/bin/sh"))]
|
||||
if args[0].endswith('bash') and not str_to_bool(os.environ.get("GST_BUILD_DISABLE_PS1_OVERRIDE", r"FALSE")):
|
||||
# Let the GC remove the tmp file
|
||||
tmprc = tempfile.NamedTemporaryFile(mode='w')
|
||||
bashrc = os.path.expanduser('~/.bashrc')
|
||||
if os.path.exists(bashrc):
|
||||
with open(bashrc, 'r') as src:
|
||||
shutil.copyfileobj(src, tmprc)
|
||||
tmprc.write('\nexport PS1="[gst-%s] $PS1"' % gst_version)
|
||||
tmprc.flush()
|
||||
if is_bash_completion_available(options):
|
||||
bash_completions_files = []
|
||||
for p in BASH_COMPLETION_PATHS:
|
||||
if os.path.exists(p):
|
||||
bash_completions_files += os.listdir(path=p)
|
||||
bc_rc = BC_RC.format(bash_completions=' '.join(bash_completions_files), bash_completions_paths=' '.join(BASH_COMPLETION_PATHS))
|
||||
tmprc.write(bc_rc)
|
||||
tmprc.flush()
|
||||
args.append("--rcfile")
|
||||
args.append(tmprc.name)
|
||||
elif args[0].endswith('fish'):
|
||||
# Ignore SIGINT while using fish as the shell to make it behave
|
||||
# like other shells such as bash and zsh.
|
||||
# See: https://gitlab.freedesktop.org/gstreamer/gst-build/issues/18
|
||||
signal.signal(signal.SIGINT, lambda x, y: True)
|
||||
# Set the prompt
|
||||
args.append('--init-command')
|
||||
prompt_cmd = '''functions --copy fish_prompt original_fish_prompt
|
||||
function fish_prompt
|
||||
echo -n '[gst-{}] '(original_fish_prompt)
|
||||
end'''.format(gst_version)
|
||||
args.append(prompt_cmd)
|
||||
elif args[0].endswith('zsh'):
|
||||
tmpdir = tempfile.TemporaryDirectory()
|
||||
# Let the GC remove the tmp file
|
||||
tmprc = open(os.path.join(tmpdir.name, '.zshrc'), 'w')
|
||||
zshrc = os.path.expanduser('~/.zshrc')
|
||||
if os.path.exists(zshrc):
|
||||
with open(zshrc, 'r') as src:
|
||||
shutil.copyfileobj(src, tmprc)
|
||||
tmprc.write('\nexport PROMPT="[gst-{}] $PROMPT"'.format(gst_version))
|
||||
tmprc.flush()
|
||||
env['ZDOTDIR'] = tmpdir.name
|
||||
try:
|
||||
if options.only_environment:
|
||||
for name, value in env.items():
|
||||
print('{}={}'.format(name, shlex.quote(value)))
|
||||
print('export {}'.format(name))
|
||||
else:
|
||||
if os.environ.get("CI_PROJECT_NAME"):
|
||||
print("Ignoring SIGINT when running on the CI,"
|
||||
" as we get spurious sigint in there for some reason.")
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
exit(subprocess.call(args, close_fds=False, env=env))
|
||||
|
||||
except subprocess.CalledProcessError as e:
|
||||
exit(e.returncode)
|
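Note on the --only-environment mode above: instead of spawning a shell, the script prints one shell-quoted `NAME=value` line plus a matching `export NAME` line per variable. A minimal sketch of consuming that output from another Python process, assuming the script is invoked as ./gst-env.py with a build directory named builddir (both paths are assumptions for illustration):

import shlex
import subprocess

# Capture the environment that gst-env.py would have handed to the subshell.
out = subprocess.check_output(
    ['./gst-env.py', '--builddir=builddir', '--only-environment'], text=True)

env = {}
for line in out.splitlines():
    if line.startswith('export '):
        continue  # the script emits a separate `export NAME` line per variable
    name, _, value = line.partition('=')
    env[name] = ' '.join(shlex.split(value))  # undo the shlex.quote() applied above

print(env.get('GST_PLUGIN_PATH', ''))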
406
meson.build
Normal file
406
meson.build
Normal file
@ -0,0 +1,406 @@
|
||||
project('gstreamer-full', 'c',
|
||||
version : '1.21.0.1',
|
||||
meson_version : '>= 0.59',
|
||||
default_options : ['buildtype=debugoptimized',
|
||||
# Needed due to https://github.com/mesonbuild/meson/issues/1889,
|
||||
# but this can cause problems in the future. Remove it
|
||||
# when it's no longer necessary.
|
||||
'cpp_std=c++14'])
|
||||
|
||||
gst_version = '>= @0@'.format(meson.project_version())
|
||||
|
||||
build_system = build_machine.system()
|
||||
cc = meson.get_compiler('c')
|
||||
|
||||
fs = import('fs')
|
||||
gnome = import('gnome')
|
||||
pkgconfig = import('pkgconfig')
|
||||
python3 = import('python').find_installation()
|
||||
# Ensure that we're not being run from inside the development environment
|
||||
# because that will confuse meson, and it might find the already-built
|
||||
# gstreamer. It's fine if people run `ninja` as long as it doesn't run
|
||||
# reconfigure because ninja doesn't care about the env.
|
||||
ensure_not_devenv = '''
|
||||
import os
|
||||
assert('GST_ENV' not in os.environ)
|
||||
'''
|
||||
cmdres = run_command(python3, '-c', ensure_not_devenv, check: false)
|
||||
if cmdres.returncode() != 0
|
||||
error('Do not run `ninja reconfigure` or `meson` for gst-build inside the development environment, you will run into problems')
|
||||
endif
|
||||
|
||||
# Install gst-indent pre-commit hook
|
||||
run_command(python3, '-c', 'import shutil; shutil.copy("scripts/git-hooks/multi-pre-commit.hook", ".git/hooks/pre-commit")', check: false)
|
||||
|
||||
# Ensure that the user does not have Strawberry Perl in PATH, since it ships
|
||||
# with a pkg-config.bat and broken pkgconfig files for libffi and zlib. Will
|
||||
# cause a build error, such as in
|
||||
# https://gitlab.freedesktop.org/gstreamer/gst-build/-/issues/41
|
||||
ensure_no_strawberry_perl = '''
|
||||
import os
|
||||
assert(r'Strawberry\perl\bin' not in os.environ['PATH'])
|
||||
'''
|
||||
if build_system == 'windows' and meson.version().version_compare('<0.60.0')
|
||||
cmdres = run_command(python3, '-c', ensure_no_strawberry_perl, check: false)
|
||||
if cmdres.returncode() != 0
|
||||
error('You have Strawberry Perl in PATH which is known to cause build issues with Meson < 0.60.0. Please remove it from PATH, uninstall it, or upgrade Meson.')
|
||||
endif
|
||||
endif
|
||||
|
||||
documented_projects = ''
|
||||
# Make it possible to use msys2 built zlib which fails
|
||||
# when not using the mingw toolchain as it uses unistd.h
|
||||
if not meson.is_subproject() and cc.get_id() == 'msvc'
|
||||
uname = find_program('uname', required: false)
|
||||
if uname.found()
|
||||
ret = run_command(uname, '-o', check: false)
|
||||
if ret.returncode() == 0 and ret.stdout().to_lower() == 'msys'
|
||||
ret = run_command(uname, '-r', check: false)
|
||||
# The kernel version returned by uname is actually the msys version
|
||||
if ret.returncode() == 0 and ret.stdout().startswith('2')
|
||||
# If a system zlib is found, disable UNIX features in zlib.h and zconf.h
|
||||
if cc.find_library('z').found()
|
||||
add_global_arguments('-DZ_SOLO', language: 'c')
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
|
||||
# Ensure that MSVC interprets all source code as UTF-8. Only do this when we're
|
||||
# not a subproject, because subprojects are not allowed to call
|
||||
# add_global_arguments().
|
||||
if not meson.is_subproject() and cc.get_id() == 'msvc'
|
||||
add_global_arguments(
|
||||
cc.get_supported_arguments(['/utf-8']), # set the input encoding to utf-8
|
||||
language: ['c', 'cpp'])
|
||||
endif
|
||||
|
||||
# Ordered list of subprojects (dict has no ordering guarantees)
|
||||
subprojects = [
|
||||
['gstreamer', {'build-hotdoc': true}],
|
||||
['gst-plugins-base', {'option': get_option('base'), 'build-hotdoc': true}],
|
||||
['gst-plugins-good', {'option': get_option('good'), 'build-hotdoc': true}],
|
||||
['libnice', { 'option': get_option('libnice'), 'match_gst_version': false}],
|
||||
['gst-plugins-bad', { 'option': get_option('bad'), 'build-hotdoc': true}],
|
||||
['gst-plugins-ugly', { 'option': get_option('ugly'), 'build-hotdoc': true}],
|
||||
['gst-libav', { 'option': get_option('libav'), 'build-hotdoc': true}],
|
||||
['gst-rtsp-server', { 'option': get_option('rtsp_server'), 'build-hotdoc': true}],
|
||||
['gst-devtools', { 'option': get_option('devtools'), 'build-hotdoc': true }],
|
||||
['gst-integration-testsuites', { 'option': get_option('devtools') }],
|
||||
['gst-editing-services', { 'option': get_option('ges'), 'build-hotdoc': true}],
|
||||
['gstreamer-vaapi', { 'option': get_option('vaapi'), 'build-hotdoc': true}],
|
||||
['gst-omx', { 'option': get_option('omx'), 'build-hotdoc': true}],
|
||||
['gstreamer-sharp', { 'option': get_option('sharp') }],
|
||||
['pygobject', { 'option': get_option('python'), 'match_gst_version': false, 'sysdep': 'pygobject-3.0', 'sysdep_version': '>= 3.8' }],
|
||||
['gst-python', { 'option': get_option('python')}],
|
||||
['gst-examples', { 'option': get_option('gst-examples'), 'match_gst_version': false}],
|
||||
['gst-plugins-rs', { 'option': get_option('rs'), 'match_gst_version': false}],
|
||||
]
|
||||
|
||||
symlink = '''
|
||||
import os
|
||||
|
||||
os.symlink(os.path.join('@1@', 'subprojects', '@0@'),
|
||||
os.path.join('@1@', '@0@'))
|
||||
'''
|
||||
|
||||
if build_system == 'windows'
|
||||
subproject('win-flex-bison-binaries')
|
||||
subproject('win-nasm')
|
||||
elif build_system == 'darwin'
|
||||
subproject('macos-bison-binary')
|
||||
endif
|
||||
|
||||
orc_subproject = subproject('orc', required: get_option('orc'))
|
||||
|
||||
foreach custom_subproj: get_option('custom_subprojects').split(',')
|
||||
if custom_subproj != ''
|
||||
message ('Adding custom subproject ' + custom_subproj)
|
||||
subprojects += [[custom_subproj, {'match_gst_version': false}]]
|
||||
endif
|
||||
endforeach
|
||||
|
||||
|
||||
subprojects_names = []
|
||||
plugins_doc_caches = []
|
||||
orc_update_targets = []
|
||||
all_plugins = []
|
||||
# Using a list and not a dict to keep the ordering to build the chain of `gir`
|
||||
# dependencies
|
||||
all_libraries = []
|
||||
foreach sp : subprojects
|
||||
project_name = sp[0]
|
||||
build_infos = sp[1]
|
||||
is_required = build_infos.get('option', true)
|
||||
sysdep = build_infos.get('sysdep', '')
|
||||
sysdep_version = build_infos.get('sysdep_version', '')
|
||||
match_gst_version = build_infos.get('match_gst_version', true)
|
||||
|
||||
if match_gst_version
|
||||
subproj = subproject(project_name, version: gst_version, required: is_required)
|
||||
elif sysdep != ''
|
||||
sysdep_dep = dependency(sysdep, version: sysdep_version, required: false)
|
||||
if not sysdep_dep.found()
|
||||
subproj = subproject(project_name, required: is_required)
|
||||
endif
|
||||
else
|
||||
subproj = subproject(project_name, required: is_required)
|
||||
endif
|
||||
|
||||
if subproj.found()
|
||||
plugins = subproj.get_variable('plugins', [])
|
||||
all_plugins += plugins
|
||||
all_libraries += subproj.get_variable('libraries', [])
|
||||
|
||||
orc_update_targets += subproj.get_variable('orc_update_targets', [])
|
||||
|
||||
subprojects_names += [project_name]
|
||||
|
||||
if not meson.is_cross_build() and build_infos.get('build-hotdoc', false)
|
||||
if plugins.length() > 0
|
||||
plugins_doc_caches += [subproj.get_variable('plugins_doc_dep', [])]
|
||||
endif
|
||||
if documented_projects != ''
|
||||
documented_projects += ','
|
||||
endif
|
||||
documented_projects += project_name
|
||||
endif
|
||||
endif
|
||||
endforeach
|
||||
|
||||
# Check if we need to also build glib-networking for TLS modules
|
||||
glib_dep = dependency('glib-2.0')
|
||||
if glib_dep.type_name() == 'internal'
|
||||
subproject('glib-networking', required : get_option('tls'),
|
||||
default_options: ['gnutls=auto', 'openssl=auto'])
|
||||
endif
|
||||
|
||||
plugins_doc_dep = custom_target('plugins-doc-cache',
|
||||
command: [python3, '-c', 'print("Built all doc caches")'],
|
||||
input: plugins_doc_caches,
|
||||
output: 'plugins_doc_caches',
|
||||
capture: true,
|
||||
)
|
||||
|
||||
if meson.is_cross_build() or build_machine.system() == 'windows'
|
||||
if get_option('doc').enabled()
|
||||
error('Documentation enabled but building the doc while cross building or building on windows is not supported yet.')
|
||||
endif
|
||||
|
||||
documented_projects = ''
|
||||
message('Documentation not built as building the documentation while cross building or building on windows is not supported yet.')
|
||||
else
|
||||
hotdoc_p = find_program('hotdoc', required : get_option('doc'))
|
||||
if not hotdoc_p.found()
|
||||
documented_projects = ''
|
||||
message('Not building documentation as hotdoc was not found')
|
||||
endif
|
||||
endif
|
||||
|
||||
write_file_contents = '''
|
||||
import os
|
||||
import sys
|
||||
|
||||
assert len(sys.argv) >= 3
|
||||
fname = sys.argv[1]
|
||||
contents = sys.argv[2]
|
||||
|
||||
with open(fname, 'w') as f:
|
||||
f.write(contents)
|
||||
'''
|
||||
|
||||
configure_file(
|
||||
output : 'GstDocumentedSubprojects',
|
||||
command : [python3,
|
||||
'-c', write_file_contents,
|
||||
'@OUTPUT@',
|
||||
documented_projects]
|
||||
)
|
||||
|
||||
if documented_projects != ''
|
||||
subproject('gst-docs', required: get_option('doc').enabled())
|
||||
message('Gst docs subprojects: ' + documented_projects)
|
||||
endif
|
||||
|
||||
all_plugins_paths = []
|
||||
all_plugins_dirs = []
|
||||
foreach plugin: all_plugins
|
||||
all_plugins_paths += plugin.full_path()
|
||||
all_plugins_dirs += fs.parent(plugin.full_path())
|
||||
endforeach
|
||||
# Work around meson bug: https://github.com/mesonbuild/meson/pull/6770
|
||||
pathsep = host_machine.system() == 'windows' ? ';' : ':'
|
||||
all_plugins_paths = pathsep.join(all_plugins_paths)
|
||||
|
||||
devenv = environment()
|
||||
devenv.prepend('GST_PLUGIN_PATH', all_plugins_dirs)
|
||||
devenv.set('CURRENT_GST', meson.current_source_dir())
|
||||
devenv.set('GST_VERSION', meson.project_version())
|
||||
devenv.set('GST_ENV', 'gst-' + meson.project_version())
|
||||
devenv.set('GST_REGISTRY', meson.current_build_dir() / 'registry.dat')
|
||||
devenv.set('GST_PLUGIN_SYSTEM_PATH', '')
|
||||
meson.add_devenv(devenv)
|
||||
|
||||
generate_plugins_paths = find_program('scripts/generate_plugins_path.py')
|
||||
configure_file(
|
||||
output : 'GstPluginsPath.json',
|
||||
command : [generate_plugins_paths,
|
||||
'@OUTPUT@',
|
||||
all_plugins_paths]
|
||||
)
|
||||
|
||||
if get_option('default_library') == 'static'
|
||||
# Generate a .c file which declares and registers all built plugins
|
||||
plugins_names = []
|
||||
foreach plugin: all_plugins
|
||||
plugins_names += plugin.full_path()
|
||||
endforeach
|
||||
all_plugin_names = ';'.join(plugins_names)
|
||||
|
||||
static_plugins = get_option('gst-full-plugins')
|
||||
if static_plugins == '*'
|
||||
static_plugins = all_plugin_names
|
||||
endif
|
||||
generate_init_static_plugins = find_program('scripts/generate_init_static_plugins.py')
|
||||
init_static_plugins_c = configure_file(
|
||||
output: 'gstinitstaticplugins.c',
|
||||
command : [generate_init_static_plugins,
|
||||
'-o ' + '@OUTPUT@',
|
||||
'-p ' + static_plugins,
|
||||
'-e ' + get_option('gst-full-elements'),
|
||||
'-t ' + get_option('gst-full-typefind-functions'),
|
||||
'-d ' + get_option('gst-full-device-providers'),
|
||||
'-T ' + get_option('gst-full-dynamic-types')
|
||||
]
|
||||
)
|
||||
|
||||
gstfull_link_args = cc.get_supported_link_arguments(['-Wl,-Bsymbolic-functions'])
|
||||
|
||||
# Get a list of libraries that needs to be exposed in the ABI.
|
||||
exposed_libs = []
|
||||
exposed_girs = []
|
||||
incdir_deps = []
|
||||
wanted_libs = ['gstreamer-1.0'] + get_option('gst-full-libraries')
|
||||
all_libs = '*' in wanted_libs
|
||||
|
||||
foreach pkgname_library : all_libraries
|
||||
pkg_name = pkgname_library[0]
|
||||
lib_def = pkgname_library[1]
|
||||
|
||||
if pkg_name in wanted_libs or all_libs
|
||||
if lib_def.has_key('lib')
|
||||
incdir_deps += dependency(pkg_name).partial_dependency(includes: true, sources: true)
|
||||
exposed_libs += [lib_def['lib']]
|
||||
endif
|
||||
|
||||
if lib_def.has_key('gir')
|
||||
exposed_girs += lib_def['gir']
|
||||
endif
|
||||
endif
|
||||
endforeach
|
||||
|
||||
# glib and gobject are part of our public API. If we are using glib from the
|
||||
# system then our pkg-config file must require it. If we built it as
|
||||
# subproject then we need to link_whole it.
|
||||
glib_deps = []
|
||||
glib_dep = dependency('glib-2.0')
|
||||
gobject_dep = dependency('gobject-2.0')
|
||||
if gobject_dep.type_name() == 'internal'
|
||||
glib_subproject = subproject('glib')
|
||||
exposed_libs += glib_subproject.get_variable('libglib')
|
||||
exposed_libs += glib_subproject.get_variable('libgobject')
|
||||
incdir_deps += [
|
||||
glib_dep.partial_dependency(includes: true),
|
||||
gobject_dep.partial_dependency(includes: true),
|
||||
]
|
||||
else
|
||||
glib_deps = [glib_dep, gobject_dep]
|
||||
endif
|
||||
|
||||
link_deps = []
|
||||
if get_option('gst-full-version-script') != ''
|
||||
symbol_map = meson.current_source_dir() / get_option('gst-full-version-script')
|
||||
link_arg = '-Wl,--version-script=' + symbol_map
|
||||
if cc.has_link_argument(link_arg)
|
||||
gstfull_link_args += link_arg
|
||||
link_deps += symbol_map
|
||||
elif cc.get_id() == 'msvc'
|
||||
warning('FIXME: Provide a def file to publish the public symbols')
|
||||
else
|
||||
warning('FIXME: Linker does not support the supplied version script (' + symbol_map + '), please disable the "gst-full-version-script" option')
|
||||
endif
|
||||
endif
|
||||
|
||||
# Build both shared and static library
|
||||
gstfull = both_libraries('gstreamer-full-1.0',
|
||||
init_static_plugins_c,
|
||||
link_with : all_plugins,
|
||||
link_args: gstfull_link_args,
|
||||
link_whole : exposed_libs,
|
||||
dependencies : incdir_deps + glib_deps,
|
||||
link_depends : link_deps,
|
||||
install : true,
|
||||
)
|
||||
|
||||
gst_full_dep = declare_dependency(link_with: gstfull.get_shared_lib(),
|
||||
dependencies : incdir_deps + glib_deps,
|
||||
include_directories: include_directories('.')
|
||||
)
|
||||
|
||||
gst_full_libs_private = cc.get_supported_link_arguments(['-Wl,--undefined=gst_init_static_plugins'])
|
||||
if gst_full_libs_private == []
|
||||
warning('The compiler does not support `-Wl,--undefined` linker flag. The method `gst_init_static_plugins` might be dropped during the link stage of an application using libgstreamer-full-1.0.a, preventing plugins registration.')
|
||||
endif
|
||||
|
||||
if not get_option('introspection').disabled()
|
||||
built_girs = {}
|
||||
foreach gir: exposed_girs
|
||||
includes = []
|
||||
foreach include: gir.get('includes', [])
|
||||
includes += [built_girs.get(include, include)]
|
||||
endforeach
|
||||
|
||||
gir += {
|
||||
'includes': includes,
|
||||
'extra_args': gir.get('extra_args', []) + ['--add-include-path=' + meson.current_build_dir()],
|
||||
'install': true,
|
||||
}
|
||||
built_girs += {gir.get('namespace') + '-' + gir.get('nsversion'): gnome.generate_gir(gstfull, kwargs: gir)[0]}
|
||||
endforeach
|
||||
endif
|
||||
|
||||
pkgconfig.generate(gstfull,
|
||||
requires: glib_deps,
|
||||
libraries_private: gst_full_libs_private,
|
||||
subdirs : 'gstreamer-1.0')
|
||||
meson.override_dependency('gstreamer-full-1.0', gst_full_dep)
|
||||
endif
|
||||
|
||||
message('Building subprojects: ' + ', '.join(subprojects_names))
|
||||
|
||||
setenv = find_program('gst-env.py')
|
||||
|
||||
devenv_cmd = [setenv, '--builddir=@0@'.format(meson.build_root()),
|
||||
'--srcdir=@0@'.format(meson.source_root())]
|
||||
|
||||
subdir('tests')
|
||||
if meson.can_run_host_binaries() and build_machine.system() == 'linux' and host_machine.system() == 'windows'
|
||||
# FIXME: Ideally we could get the wrapper directly from meson
|
||||
devenv_cmd += ['--wine', host_machine.cpu_family() == 'x86_64' ? 'wine64' : 'wine32']
|
||||
sysroot = meson.get_cross_property('sys_root')
|
||||
if sysroot != ''
|
||||
# Logic from meson
|
||||
devenv_cmd += ['--winepath', 'Z:' + join_paths(sysroot, 'bin')]
|
||||
endif
|
||||
endif
|
||||
|
||||
run_target('devenv', command : devenv_cmd)
|
||||
|
||||
if orc_subproject.found() and orc_update_targets.length() > 0
|
||||
alias_target('update-orc-dist', orc_update_targets)
|
||||
endif
|
||||
|
||||
summary({
|
||||
'gstreamer-full': get_option('default_library') == 'static',
|
||||
}, section: 'Build options', bool_yn: true, list_sep: ' ')
|
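For context, the GstPluginsPath.json generated above is just a JSON array of the directories containing the freshly built plugins (see scripts/generate_plugins_path.py below). A minimal sketch of a consumer, assuming a build directory named builddir (a hypothetical path):

import json
import os

# Point GStreamer at the just-built plugins, mirroring what the devenv sets up above.
with open(os.path.join('builddir', 'GstPluginsPath.json')) as f:
    plugin_dirs = json.load(f)

os.environ['GST_PLUGIN_PATH'] = os.pathsep.join(plugin_dirs)
os.environ['GST_PLUGIN_SYSTEM_PATH'] = ''  # ignore system-installed plugins, as the devenv does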
48
meson_options.txt
Normal file
48
meson_options.txt
Normal file
@ -0,0 +1,48 @@
|
||||
# Subproject options
|
||||
option('python', type : 'feature', value : 'auto')
|
||||
option('libav', type : 'feature', value : 'auto')
|
||||
option('libnice', type : 'feature', value : 'auto')
|
||||
option('base', type : 'feature', value : 'enabled')
|
||||
option('good', type : 'feature', value : 'enabled')
|
||||
option('ugly', type : 'feature', value : 'auto')
|
||||
option('bad', type : 'feature', value : 'auto')
|
||||
option('devtools', type : 'feature', value : 'auto')
|
||||
option('ges', type : 'feature', value : 'auto')
|
||||
option('rtsp_server', type : 'feature', value : 'auto')
|
||||
option('omx', type : 'feature', value : 'disabled')
|
||||
option('vaapi', type : 'feature', value : 'disabled')
|
||||
option('sharp', type : 'feature', value : 'disabled')
|
||||
option('rs', type : 'feature', value : 'disabled')
|
||||
option('gst-examples', type : 'feature', value : 'auto', description : 'Build gst-examples')
|
||||
option('tls', type : 'feature', value : 'auto', description : 'TLS support using glib-networking')
|
||||
option('qt5', type : 'feature', value : 'auto', description : 'Qt5 Support')
|
||||
|
||||
# Other options
|
||||
option('custom_subprojects', type : 'string', value : '', description : 'Comma-separated project names')
|
||||
option('gst-full-libraries', type : 'array', value : [],
|
||||
description : '''List of libraries to expose in gstreamer-full's ABI. gstreamer, glib and gobject are always included.''')
|
||||
option('gst-full-version-script', type : 'string', value: 'data/misc/gstreamer-full-default.map',
|
||||
description : 'path of the version script to be used by the linker, see https://www.gnu.org/software/gnulib/manual/html_node/LD-Version-Scripts.html')
|
||||
option('gst-full-plugins', type : 'string', value : '*',
|
||||
description : '''List of plugins to expose in gstreamer-full's ABI with the syntax plugin1;plugin2. By default '*' will export all plugins enabled by the build process.''')
|
||||
option('gst-full-elements', type : 'string', value : '',
|
||||
description : '''List of elements to expose in gstreamer-full's ABI with the syntax plugin1;plugin2:element1,element2. By default '' will export all elements of the enabled plugins.''')
|
||||
option('gst-full-typefind-functions', type : 'string', value : '',
|
||||
description : '''List of typefind functions to expose in gstreamer-full's ABI with the syntax plugin:func1,func2. By default '' will export all typefind functions of the enabled plugin.''')
|
||||
option('gst-full-device-providers', type : 'string', value : '',
|
||||
description : '''List of device providers to expose in gstreamer-full's ABI with the syntax plugin1:dp1;plugin2:dp1,dp2. By default '' will export all device providers of the enabled plugins.''')
|
||||
option('gst-full-dynamic-types', type : 'string', value : '',
|
||||
description : '''List of dynamic types to expose in gstreamer-full's ABI with the syntax plugin:dt1,dt2. By default '' will export all dynamic types of the enabled plugins.''')
|
||||
|
||||
# License-related feature options
|
||||
option('gpl', type: 'feature', value: 'disabled',
|
||||
description: 'Allow build of plugins that have (A)GPL-licensed dependencies')
|
||||
|
||||
# Common options, automatically inherited by subprojects
|
||||
option('tests', type : 'feature', value : 'auto', description : 'Build tests')
|
||||
option('examples', type : 'feature', value : 'auto', description : 'Build examples')
|
||||
option('introspection', type : 'feature', value : 'auto', description : 'Generate introspection data')
|
||||
option('nls', type : 'feature', value : 'auto', description : 'Enable native language support (translations)')
|
||||
option('orc', type : 'feature', value : 'auto', description : 'Enable Optimized Inner Loop Runtime Compiler')
|
||||
option('doc', type : 'feature', value : 'auto', description : 'Generate API documentation with hotdoc')
|
||||
option('gtk_doc', type : 'feature', value : 'disabled', description : 'Generate API documentation with gtk-doc')
|
0
scripts/__init__.py
Normal file
0
scripts/__init__.py
Normal file
27
scripts/check-clean-repos.py
Executable file
27
scripts/check-clean-repos.py
Executable file
@ -0,0 +1,27 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from common import git
|
||||
|
||||
|
||||
SCRIPTDIR = os.path.realpath(os.path.dirname(__file__))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
subprojects_dir = os.path.join(SCRIPTDIR, "..", "subprojects")
|
||||
exitcode = 0
|
||||
for repo_name in os.listdir(subprojects_dir):
|
||||
repo_dir = os.path.normpath(os.path.join(SCRIPTDIR, subprojects_dir, repo_name))
|
||||
if not os.path.exists(os.path.join(repo_dir, '.git')):
|
||||
continue
|
||||
|
||||
diff = git('diff', repository_path=repo_dir).strip('\n')
|
||||
if diff:
|
||||
print('ERROR: Repository %s is not clean' % repo_dir)
|
||||
print('NOTE: Make sure to commit necessary changes in the gst_plugins_cache.json files')
|
||||
print(diff)
|
||||
exitcode += 1
|
||||
|
||||
sys.exit(exitcode)
|
159
scripts/common.py
Normal file
159
scripts/common.py
Normal file
@ -0,0 +1,159 @@
|
||||
import os
|
||||
import sys
|
||||
import shlex
|
||||
import shutil
|
||||
import argparse
|
||||
import platform
|
||||
import subprocess
|
||||
import uuid
|
||||
|
||||
|
||||
ROOTDIR = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
|
||||
if os.name == 'nt':
|
||||
import ctypes
|
||||
from ctypes import wintypes
|
||||
_GetShortPathNameW = ctypes.windll.kernel32.GetShortPathNameW
|
||||
_GetShortPathNameW.argtypes = [wintypes.LPCWSTR, wintypes.LPWSTR, wintypes.DWORD]
|
||||
_GetShortPathNameW.restype = wintypes.DWORD
|
||||
|
||||
def win32_get_short_path_name(long_name):
|
||||
"""
|
||||
Gets the short path name of a given long path.
|
||||
http://stackoverflow.com/a/23598461/200291
|
||||
"""
|
||||
output_buf_size = 0
|
||||
while True:
|
||||
output_buf = ctypes.create_unicode_buffer(output_buf_size)
|
||||
needed = _GetShortPathNameW(long_name, output_buf, output_buf_size)
|
||||
if output_buf_size >= needed:
|
||||
return output_buf.value
|
||||
else:
|
||||
output_buf_size = needed
|
||||
|
||||
|
||||
def get_wine_shortpath(winecmd, wine_paths):
|
||||
seen = set()
|
||||
wine_paths = [p for p in wine_paths if not (p in seen or seen.add(p))]
|
||||
|
||||
getShortPathScript = '%s.bat' % str(uuid.uuid4()).lower()[:5]
|
||||
with open(getShortPathScript, mode='w') as f:
|
||||
f.write("@ECHO OFF\nfor %%x in (%*) do (\n echo|set /p=;%~sx\n)\n")
|
||||
f.flush()
|
||||
try:
|
||||
with open(os.devnull, 'w') as stderr:
|
||||
wine_path = subprocess.check_output(
|
||||
winecmd +
|
||||
['cmd', '/C', getShortPathScript] + wine_paths,
|
||||
stderr=stderr).decode('utf-8')
|
||||
except subprocess.CalledProcessError as e:
|
||||
print("Could not get short paths: %s" % e)
|
||||
wine_path = ';'.join(wine_paths)
|
||||
finally:
|
||||
os.remove(getShortPathScript)
|
||||
if len(wine_path) > 2048:
|
||||
raise AssertionError('WINEPATH size {} > 2048'
|
||||
' this will cause random failure.'.format(
|
||||
len(wine_path)))
|
||||
return wine_path
|
||||
|
||||
|
||||
class Colors:
|
||||
HEADER = '\033[95m'
|
||||
OKBLUE = '\033[94m'
|
||||
OKGREEN = '\033[92m'
|
||||
WARNING = '\033[93m'
|
||||
FAIL = '\033[91m'
|
||||
ENDC = '\033[0m'
|
||||
|
||||
force_disable = False
|
||||
|
||||
def _windows_ansi():
|
||||
from ctypes import windll, byref
|
||||
from ctypes.wintypes import DWORD
|
||||
|
||||
kernel = windll.kernel32
|
||||
stdout = kernel.GetStdHandle(-11)
|
||||
mode = DWORD()
|
||||
if not kernel.GetConsoleMode(stdout, byref(mode)):
|
||||
return False
|
||||
# Try setting ENABLE_VIRTUAL_TERMINAL_PROCESSING (0x4)
|
||||
# If that fails (returns 0), we disable colors
|
||||
return kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON')
|
||||
|
||||
@classmethod
|
||||
def can_enable(cls):
|
||||
if not os.isatty(sys.stdout.fileno()):
|
||||
return False
|
||||
if platform.system().lower() == 'windows':
|
||||
return cls._windows_ansi()
|
||||
return os.environ.get('TERM') != 'dumb'
|
||||
|
||||
@classmethod
|
||||
def disable(cls):
|
||||
cls.HEADER = ''
|
||||
cls.OKBLUE = ''
|
||||
cls.OKGREEN = ''
|
||||
cls.WARNING = ''
|
||||
cls.FAIL = ''
|
||||
cls.ENDC = ''
|
||||
|
||||
@classmethod
|
||||
def enable(cls):
|
||||
if cls.force_disable:
|
||||
return
|
||||
|
||||
cls.HEADER = '\033[95m'
|
||||
cls.OKBLUE = '\033[94m'
|
||||
cls.OKGREEN = '\033[92m'
|
||||
cls.WARNING = '\033[93m'
|
||||
cls.FAIL = '\033[91m'
|
||||
cls.ENDC = '\033[0m'
|
||||
|
||||
|
||||
|
||||
def git(*args, repository_path='.', fatal=True):
|
||||
try:
|
||||
ret = subprocess.check_output(["git"] + list(args), cwd=repository_path,
|
||||
stdin=subprocess.DEVNULL,
|
||||
stderr=subprocess.STDOUT).decode()
|
||||
except subprocess.CalledProcessError as e:
|
||||
if fatal:
|
||||
raise e
|
||||
print("Non-fatal error running git {}:\n{}".format(' '.join(args), e))
|
||||
return None
|
||||
return ret
|
||||
|
||||
def accept_command(commands):
|
||||
"""Search @commands and returns the first found absolute path."""
|
||||
for command in commands:
|
||||
command = shutil.which(command)
|
||||
if command:
|
||||
return command
|
||||
return None
|
||||
|
||||
def get_meson():
|
||||
meson = os.path.join(ROOTDIR, 'meson', 'meson.py')
|
||||
if os.path.exists(meson):
|
||||
return [sys.executable, meson]
|
||||
|
||||
mesonintrospect = os.environ.get('MESONINTROSPECT', '')
|
||||
for comp in shlex.split(mesonintrospect):
|
||||
# mesonintrospect might look like "/usr/bin/python /somewhere/meson introspect",
|
||||
# let's not get tricked
|
||||
if 'python' in os.path.basename(comp):
|
||||
continue
|
||||
if os.path.exists(comp):
|
||||
if comp.endswith('.py'):
|
||||
return [sys.executable, comp]
|
||||
else:
|
||||
return [comp]
|
||||
|
||||
meson = accept_command(['meson.py'])
|
||||
if meson:
|
||||
return [sys.executable, meson]
|
||||
meson = accept_command(['meson'])
|
||||
if meson:
|
||||
return [meson]
|
||||
raise RuntimeError('Could not find Meson')
|
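A short usage sketch for the helpers in scripts/common.py above; the repository path is an assumption, and `fatal=False` makes git() return None instead of raising:

from common import Colors, git

if Colors.can_enable():
    Colors.enable()
else:
    Colors.disable()

branch = git('rev-parse', '--abbrev-ref', 'HEAD', repository_path='.', fatal=False)
if branch is None:
    print(Colors.FAIL + 'not a git repository?' + Colors.ENDC)
else:
    print(Colors.OKGREEN + branch.strip() + Colors.ENDC)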
118
scripts/generate_init_static_plugins.py
Normal file
118
scripts/generate_init_static_plugins.py
Normal file
@ -0,0 +1,118 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import os
|
||||
from string import Template
|
||||
|
||||
TEMPLATE = Template('''
|
||||
#include <gst/gst.h>
|
||||
|
||||
$elements_declaration
|
||||
$typefind_funcs_declaration
|
||||
$device_providers_declaration
|
||||
$dynamic_types_declaration
|
||||
$plugins_declaration
|
||||
|
||||
void
|
||||
gst_init_static_plugins (void)
|
||||
{
|
||||
static gsize initialization_value = 0;
|
||||
if (g_once_init_enter (&initialization_value)) {
|
||||
$elements_registration
|
||||
$typefind_funcs_registration
|
||||
$device_providers_registration
|
||||
$dynamic_types_registration
|
||||
$plugins_registration
|
||||
|
||||
g_once_init_leave (&initialization_value, 1);
|
||||
}
|
||||
}
|
||||
''')
|
||||
# Return the matching entry from the global plugins list (@name may be a plugin filename)
|
||||
def get_plugin_name(name):
|
||||
for p in plugins:
|
||||
if name in p:
|
||||
return p
|
||||
return ''
|
||||
|
||||
def process_features(features_list, plugins, feature_prefix):
|
||||
plugins_list = plugins
|
||||
feature_declaration = []
|
||||
feature_registration = []
|
||||
if features_list is not None:
|
||||
feature_plugins = features_list.split(';')
|
||||
for plugin in feature_plugins:
|
||||
split = plugin.split(':')
|
||||
plugin_name = split[0].strip()
|
||||
if len(split) == 2:
|
||||
if (get_plugin_name(plugin_name)) != '':
|
||||
plugins_list.remove(get_plugin_name(plugin_name))
|
||||
features = split[1].split(',')
|
||||
for feature in features:
|
||||
feature = feature.replace("-", "_")
|
||||
feature_declaration += ['%s_REGISTER_DECLARE(%s);' % (feature_prefix, feature)]
|
||||
feature_registration += ['%s_REGISTER(%s, NULL);' % (feature_prefix, feature)]
|
||||
return (plugins_list, feature_declaration, feature_registration)
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('-o', dest="output", help="Output file")
|
||||
parser.add_argument('-p','--plugins', nargs='?', default='', dest="plugins", help="The list of plugins")
|
||||
parser.add_argument('-e', '--elements', nargs='?', default='', dest="elements", help="The list of plugin:elements")
|
||||
parser.add_argument('-t', '--type-finds', nargs='?', default='', dest="typefindfuncs", help="The list of plugin:typefinds")
|
||||
parser.add_argument('-d', '--device-providers', nargs='?', default='', dest="deviceproviders", help="The list of plugin:deviceproviders")
|
||||
parser.add_argument('-T', '--dynamic-types', nargs='?', default='', dest="dynamictypes", help="The list of plugin:dynamictypes")
|
||||
options = parser.parse_args()
|
||||
if options.output is None:
|
||||
output_file = 'gstinitstaticplugins.c'
|
||||
else:
|
||||
output_file = options.output
|
||||
enable_staticelements_plugin = 0
|
||||
elements_declaration = []
|
||||
elements_registration = []
|
||||
typefind_funcs_declaration = []
|
||||
typefind_funcs_registration = []
|
||||
device_providers_declaration = []
|
||||
device_providers_registration = []
|
||||
dynamic_types_declaration = []
|
||||
dynamic_types_registration = []
|
||||
plugins_declaration = []
|
||||
plugins_registration = []
|
||||
|
||||
if options.plugins is None or options.plugins.isspace():
|
||||
plugins = []
|
||||
else:
|
||||
plugins = options.plugins.split(';')
|
||||
|
||||
# process the features
|
||||
(plugins, elements_declaration, elements_registration) = process_features(options.elements, plugins, 'GST_ELEMENT')
|
||||
(plugins, typefind_funcs_declaration, typefind_funcs_registration) = process_features(options.typefindfuncs, plugins, 'GST_TYPE_FIND')
|
||||
(plugins, device_providers_declaration, device_providers_registration) = process_features(options.deviceproviders, plugins, 'GST_DEVICE_PROVIDER')
|
||||
(plugins, dynamic_types_declaration, dynamic_types_registration) = process_features(options.dynamictypes, plugins, 'GST_DYNAMIC_TYPE')
|
||||
|
||||
# Enable plugin or elements according to the ';' separated list.
|
||||
for plugin in plugins:
|
||||
split = plugin.split(':')
|
||||
plugin_name = split[0]
|
||||
if plugin_name == '':
|
||||
continue
|
||||
filename = os.path.basename(plugin)
|
||||
if filename.startswith('libgst') and filename.endswith('.a'):
|
||||
plugin_name = filename[len('libgst'):-len('.a')]
|
||||
plugins_registration += ['GST_PLUGIN_STATIC_REGISTER(%s);' % (plugin_name)]
|
||||
plugins_declaration += ['GST_PLUGIN_STATIC_DECLARE(%s);' % (plugin_name)]
|
||||
|
||||
with open(output_file.strip(), "w") as f:
|
||||
static_elements_plugin = ''
|
||||
f.write(TEMPLATE.substitute({
|
||||
'elements_declaration': '\n'.join(elements_declaration),
|
||||
'elements_registration': '\n '.join(elements_registration),
|
||||
'typefind_funcs_declaration': '\n'.join(typefind_funcs_declaration),
|
||||
'typefind_funcs_registration': '\n '.join(typefind_funcs_registration),
|
||||
'device_providers_declaration': '\n'.join(device_providers_declaration),
|
||||
'device_providers_registration': '\n '.join(device_providers_registration),
|
||||
'dynamic_types_declaration': '\n'.join(dynamic_types_declaration),
|
||||
'dynamic_types_registration': '\n '.join(dynamic_types_registration),
|
||||
'plugins_declaration': '\n'.join(plugins_declaration),
|
||||
'plugins_registration': '\n '.join(plugins_registration),
|
||||
}))
|
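To illustrate the `plugin:feature1,feature2` syntax handled by process_features() above, a hypothetical example; the plugin and element names are made up, and the snippet is meant to be read as if it were appended to this script so that the module-level `plugins` global used by get_plugin_name() exists:

# Hypothetical illustration of the feature syntax parsed above.
plugins = ['coreelements', 'typefindfunctions']
remaining, decls, regs = process_features('coreelements:identity,fakesink',
                                          plugins, 'GST_ELEMENT')
# remaining -> ['typefindfunctions']  (coreelements now registers elements, not the whole plugin)
# decls     -> ['GST_ELEMENT_REGISTER_DECLARE(identity);', 'GST_ELEMENT_REGISTER_DECLARE(fakesink);']
# regs      -> ['GST_ELEMENT_REGISTER(identity, NULL);', 'GST_ELEMENT_REGISTER(fakesink, NULL);']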
19
scripts/generate_plugins_path.py
Normal file
19
scripts/generate_plugins_path.py
Normal file
@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import json
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(dest="output", help="Output file")
|
||||
parser.add_argument(dest="plugins", help="The list of plugins")
|
||||
|
||||
options = parser.parse_args()
|
||||
|
||||
all_paths = set()
|
||||
for plugin in options.plugins.split(os.pathsep):
|
||||
all_paths.add(os.path.dirname(plugin))
|
||||
|
||||
with open(options.output, "w") as f:
|
||||
json.dump(list(all_paths), f, indent=4, sort_keys=True)
|
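A minimal usage sketch for the script above; the plugin paths are made-up examples and the separator is os.pathsep, matching the split() done in the script:

import json
import os
import subprocess

plugins = os.pathsep.join([
    '/build/subprojects/gstreamer/plugins/libgstcoreelements.so',
    '/build/subprojects/gst-plugins-base/gst/playback/libgstplayback.so',
])
subprocess.check_call(['python3', 'scripts/generate_plugins_path.py',
                       'GstPluginsPath.json', plugins])

with open('GstPluginsPath.json') as f:
    print(json.load(f))  # the parent directory of each plugin (order not guaranteed)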
49
scripts/git-hooks/multi-pre-commit.hook
Executable file
49
scripts/git-hooks/multi-pre-commit.hook
Executable file
@ -0,0 +1,49 @@
|
||||
#!/bin/sh
|
||||
# Git pre-commit hook that runs multiple hooks specified in $HOOKS.
|
||||
# Make sure this script is executable. Bypass hooks with git commit --no-verify.
|
||||
|
||||
# This file is inspired by a set of unofficial pre-commit hooks available
|
||||
# at github.
|
||||
# Link: https://github.com/githubbrowser/Pre-commit-hooks
|
||||
# Contact: David Martin, david.martin.mailbox@googlemail.com
|
||||
|
||||
|
||||
###########################################################
|
||||
# SETTINGS:
|
||||
# pre-commit hooks to be executed. They should be in the same .git/hooks/ folder
|
||||
# as this script. Hooks should return 0 if successful and nonzero to cancel the
|
||||
# commit. They are executed in the order in which they are listed.
|
||||
###########################################################
|
||||
|
||||
HOOKS="scripts/git-hooks/pre-commit.hook scripts/git-hooks/pre-commit-python.hook"
|
||||
|
||||
# exit on error
|
||||
set -e
|
||||
|
||||
if [ "$GST_DISABLE_PRE_COMMIT_HOOKS" = "1" ]
|
||||
then
|
||||
echo "Pre-commits hooks disabled by env GST_DISABLE_PRE_COMMIT_HOOKS."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo $PWD
|
||||
|
||||
for hook in $HOOKS
|
||||
do
|
||||
echo "Running hook: $hook"
|
||||
# run hook if it exists
|
||||
# if it returns with nonzero exit with 1 and thus abort the commit
|
||||
if [ -f "$PWD/$hook" ]; then
|
||||
"$PWD/$hook"
|
||||
if [ $? != 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "Error: file $hook not found."
|
||||
echo "Aborting commit. Make sure the hook is at $PWD/$hook and executable."
|
||||
echo "You can disable it by removing it from the list"
|
||||
echo "You can skip all pre-commit hooks with --no-verify (not recommended)."
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
81
scripts/git-hooks/pre-commit-python.hook
Executable file
81
scripts/git-hooks/pre-commit-python.hook
Executable file
@ -0,0 +1,81 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
NOT_PYCODESTYLE_COMPLIANT_MESSAGE_PRE = \
|
||||
"Your code is not fully pycodestyle compliant and contains"\
|
||||
" the following coding style issues:\n\n"
|
||||
|
||||
NOT_PYCODESTYLE_COMPLIANT_MESSAGE_POST = \
|
||||
"Please fix these errors and commit again, you can do so "\
|
||||
"from the root directory automatically like this, assuming the whole "\
|
||||
"file is to be commited:"
|
||||
|
||||
NO_PYCODESTYLE_MESSAGE = \
|
||||
"You should install the pycodestyle style checker to be able"\
|
||||
" to commit in this repo.\nIt allows us to garantee that "\
|
||||
"anything that is commited respects the pycodestyle coding style "\
|
||||
"standard.\nYou can install it:\n"\
|
||||
" * on ubuntu, debian: $sudo apt-get install pycodestyle \n"\
|
||||
" * on fedora: #yum install python3-pycodestyle \n"\
|
||||
" * on arch: #pacman -S python-pycodestyle \n"\
|
||||
" * or `pip install --user pycodestyle`"
|
||||
|
||||
|
||||
def system(*args, **kwargs):
|
||||
kwargs.setdefault('stdout', subprocess.PIPE)
|
||||
proc = subprocess.Popen(args, **kwargs)
|
||||
out, err = proc.communicate()
|
||||
if isinstance(out, bytes):
|
||||
out = out.decode()
|
||||
return out
|
||||
|
||||
|
||||
def copy_files_to_tmp_dir(files):
|
||||
tempdir = tempfile.mkdtemp()
|
||||
for name in files:
|
||||
filename = os.path.join(tempdir, name)
|
||||
filepath = os.path.dirname(filename)
|
||||
if not os.path.exists(filepath):
|
||||
os.makedirs(filepath)
|
||||
with open(filename, 'w') as f:
|
||||
system('git', 'show', ':' + name, stdout=f)
|
||||
|
||||
return tempdir
|
||||
|
||||
|
||||
def main():
|
||||
modified_files = system('git', 'diff-index', '--cached',
|
||||
'--name-only', 'HEAD', '--diff-filter=ACMR').split("\n")[:-1]
|
||||
non_compliant_files = []
|
||||
output_message = None
|
||||
|
||||
for modified_file in modified_files:
|
||||
try:
|
||||
if not modified_file.endswith(".py"):
|
||||
continue
|
||||
pycodestyle_errors = system('pycodestyle', '--repeat', '--ignore', 'E402,E501,E128,W605,W503', modified_file)
|
||||
if pycodestyle_errors:
|
||||
if output_message is None:
|
||||
output_message = NOT_PYCODESTYLE_COMPLIANT_MESSAGE_PRE
|
||||
output_message += pycodestyle_errors
|
||||
non_compliant_files.append(modified_file)
|
||||
except OSError as e:
|
||||
output_message = NO_PYCODESTYLE_MESSAGE
|
||||
break
|
||||
|
||||
if output_message:
|
||||
print(output_message)
|
||||
if non_compliant_files:
|
||||
print(NOT_PYCODESTYLE_COMPLIANT_MESSAGE_POST)
|
||||
for non_compliant_file in non_compliant_files:
|
||||
print("autopep8 -i --max-line-length 120", non_compliant_file, "; git add ",
|
||||
non_compliant_file)
|
||||
print("git commit")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
83
scripts/git-hooks/pre-commit.hook
Executable file
83
scripts/git-hooks/pre-commit.hook
Executable file
@ -0,0 +1,83 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Check that the code follows a consistent code style
|
||||
#
|
||||
|
||||
# Check for existence of indent, and error out if not present.
|
||||
# On some *bsd systems the binary seems to be called gnuindent,
|
||||
# so check for that first.
|
||||
|
||||
version=`gnuindent --version 2>/dev/null`
|
||||
if test "x$version" = "x"; then
|
||||
version=`gindent --version 2>/dev/null`
|
||||
if test "x$version" = "x"; then
|
||||
version=`indent --version 2>/dev/null`
|
||||
if test "x$version" = "x"; then
|
||||
echo "GStreamer git pre-commit hook:"
|
||||
echo "Did not find GNU indent, please install it before continuing."
|
||||
exit 1
|
||||
else
|
||||
INDENT=indent
|
||||
fi
|
||||
else
|
||||
INDENT=gindent
|
||||
fi
|
||||
else
|
||||
INDENT=gnuindent
|
||||
fi
|
||||
|
||||
case `$INDENT --version` in
|
||||
GNU*)
|
||||
;;
|
||||
*)
|
||||
echo "GStreamer git pre-commit hook:"
|
||||
echo "Did not find GNU indent, please install it before continuing."
|
||||
echo "(Found $INDENT, but it doesn't seem to be GNU indent)"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
INDENT_PARAMETERS="--braces-on-if-line \
|
||||
--case-brace-indentation0 \
|
||||
--case-indentation2 \
|
||||
--braces-after-struct-decl-line \
|
||||
--line-length80 \
|
||||
--no-tabs \
|
||||
--cuddle-else \
|
||||
--dont-line-up-parentheses \
|
||||
--continuation-indentation4 \
|
||||
--honour-newlines \
|
||||
--tab-size8 \
|
||||
--indent-level2 \
|
||||
--leave-preprocessor-space"
|
||||
|
||||
echo "--Checking style--"
|
||||
for file in `git diff-index --cached --name-only HEAD --diff-filter=ACMR| grep "\.c$"` ; do
|
||||
# nf is the temporary checkout. This makes sure we check against the
|
||||
# revision in the index (and not the checked out version).
|
||||
nf=`git checkout-index --temp ${file} | cut -f 1`
|
||||
newfile=`mktemp /tmp/${nf}.XXXXXX` || exit 1
|
||||
$INDENT ${INDENT_PARAMETERS} \
|
||||
$nf -o $newfile 2>> /dev/null
|
||||
# FIXME: Call indent twice as it tends to do line-breaks
|
||||
# differently for every second call.
|
||||
$INDENT ${INDENT_PARAMETERS} \
|
||||
$newfile 2>> /dev/null
|
||||
diff -u -p "${nf}" "${newfile}"
|
||||
r=$?
|
||||
rm "${newfile}"
|
||||
rm "${nf}"
|
||||
if [ $r != 0 ] ; then
|
||||
echo "================================================================================================="
|
||||
echo " Code style error in: $file "
|
||||
echo " "
|
||||
echo " Please fix before committing. Don't forget to run git add before trying to commit again. "
|
||||
echo " If the whole file is to be committed, this should work (run from the top-level directory): "
|
||||
echo " "
|
||||
echo " gst-indent $file; git add $file; git commit"
|
||||
echo " "
|
||||
echo "================================================================================================="
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
echo "--Checking style pass--"
|
45
scripts/gst-indent
Executable file
45
scripts/gst-indent
Executable file
@ -0,0 +1,45 @@
|
||||
#!/bin/sh
|
||||
|
||||
for execname in gnuindent gindent indent; do
|
||||
version=`$execname --version 2>/dev/null`
|
||||
if test "x$version" != "x"; then
|
||||
INDENT=$execname
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if test -z $INDENT; then
|
||||
echo "GStreamer git pre-commit hook:"
|
||||
echo "Did not find GNU indent, please install it before continuing."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
case `$INDENT --version` in
|
||||
GNU*)
|
||||
;;
|
||||
*)
|
||||
echo "Did not find GNU indent, please install it before continuing."
|
||||
echo "(Found $INDENT, but it doesn't seem to be GNU indent)"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Run twice. GNU indent isn't idempotent
|
||||
# when run once
|
||||
for i in 1 2; do
|
||||
$INDENT \
|
||||
--braces-on-if-line \
|
||||
--case-brace-indentation0 \
|
||||
--case-indentation2 \
|
||||
--braces-after-struct-decl-line \
|
||||
--line-length80 \
|
||||
--no-tabs \
|
||||
--cuddle-else \
|
||||
--dont-line-up-parentheses \
|
||||
--continuation-indentation4 \
|
||||
--honour-newlines \
|
||||
--tab-size8 \
|
||||
--indent-level2 \
|
||||
--leave-preprocessor-space \
|
||||
$* || exit $?
|
||||
done
|
18
scripts/gst-indent-all
Executable file
18
scripts/gst-indent-all
Executable file
@ -0,0 +1,18 @@
|
||||
#!/bin/bash
|
||||
|
||||
BASEDIR=$(dirname $0)
|
||||
|
||||
|
||||
filter_cmd=("cat")
|
||||
if test -f ".indentignore"; then
|
||||
filter_args=()
|
||||
while read -r line; do
|
||||
if test -n "$line"; then
|
||||
filter_args+=("-e" "$line")
|
||||
fi
|
||||
done < ".indentignore"
|
||||
if [[ ${#filter_args[@]} -gt 0 ]]; then
|
||||
filter_cmd=("grep" "-v" "${filter_args[@]}")
|
||||
fi
|
||||
fi
|
||||
git ls-files "*.c" | "${filter_cmd[@]}" | xargs -d '\n' $BASEDIR/gst-indent
|
681
scripts/move_mrs_to_monorepo.py
Executable file
681
scripts/move_mrs_to_monorepo.py
Executable file
@ -0,0 +1,681 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from urllib.parse import urlparse
|
||||
from contextlib import contextmanager
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
try:
|
||||
import gitlab
|
||||
except ModuleNotFoundError:
|
||||
print("========================================================================", file=sys.stderr)
|
||||
print("ERROR: Install python-gitlab with `python3 -m pip install python-gitlab python-dateutil`", file=sys.stderr)
|
||||
print("========================================================================", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
from dateutil import parser as dateparse
|
||||
except ModuleNotFoundError:
|
||||
print("========================================================================", file=sys.stderr)
|
||||
print("ERROR: Install dateutil with `python3 -m pip install dateutil`", file=sys.stderr)
|
||||
print("========================================================================", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
import argparse
|
||||
import requests
|
||||
|
||||
import subprocess
|
||||
|
||||
ROOT_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), ".."))
|
||||
|
||||
URL = "https://gitlab.freedesktop.org/"
|
||||
SIGN_IN_URL = URL + 'sign_in'
|
||||
LOGIN_URL = URL + 'users/sign_in'
|
||||
LOGIN_URL_LDAP = URL + '/users/auth/ldapmain/callback'
|
||||
|
||||
MONOREPO_REMOTE_NAME = 'origin'
|
||||
NAMESPACE = "gstreamer"
|
||||
MONOREPO_NAME = 'gstreamer'
|
||||
MONOREPO_REMOTE = URL + f'{NAMESPACE}/{MONOREPO_NAME}'
|
||||
MONOREPO_BRANCH = 'main'
|
||||
PING_SIGN = '@'
|
||||
MOVING_NAMESPACE = NAMESPACE
|
||||
|
||||
PARSER = argparse.ArgumentParser(
|
||||
description="Move merge request from old GStreamer module to the new"
|
||||
"GStreamer 'monorepo'.\n"
|
||||
" All your pending merge requests from all GStreamer modules will"
|
||||
" be moved the the mono repository."
|
||||
)
|
||||
PARSER.add_argument("--skip-branch", action="store", nargs="*",
|
||||
help="Ignore MRs for branches which match those names.", dest="skipped_branches")
|
||||
PARSER.add_argument("--skip-on-failure", action="store_true", default=False)
|
||||
PARSER.add_argument("--dry-run", "-n", action="store_true", default=False)
|
||||
PARSER.add_argument("--use-branch-if-exists",
|
||||
action="store_true", default=False)
|
||||
PARSER.add_argument("--list-mrs-only", action="store_true", default=False)
|
||||
PARSER.add_argument(
|
||||
"-c",
|
||||
"--config-file",
|
||||
action="append",
|
||||
dest='config_files',
|
||||
help="Configuration file to use. Can be used multiple times.",
|
||||
required=False,
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"-g",
|
||||
"--gitlab",
|
||||
help=(
|
||||
"Which configuration section should "
|
||||
"be used. If not defined, the default selection "
|
||||
"will be used."
|
||||
),
|
||||
required=False,
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"-m",
|
||||
"--module",
|
||||
help="GStreamer module to move MRs for. All if none specified. Can be used multiple times.",
|
||||
dest='modules',
|
||||
action="append",
|
||||
required=False,
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"-mr",
|
||||
"--mr-url",
|
||||
default=None,
|
||||
type=str,
|
||||
help=(
|
||||
"URL of the MR to work on."
|
||||
),
|
||||
required=False,
|
||||
)
|
||||
|
||||
GST_PROJECTS = [
|
||||
'gstreamer',
|
||||
'gst-plugins-base',
|
||||
'gst-plugins-good',
|
||||
'gst-plugins-bad',
|
||||
'gst-plugins-ugly',
|
||||
'gst-libav',
|
||||
'gst-rtsp-server',
|
||||
'gstreamer-vaapi',
|
||||
'gstreamer-sharp',
|
||||
'gst-python',
|
||||
'gst-omx',
|
||||
'gst-editing-services',
|
||||
'gst-devtools',
|
||||
'gst-docs',
|
||||
'gst-examples',
|
||||
'gst-build',
|
||||
'gst-ci',
|
||||
]
|
||||
|
||||
GST_PROJECTS_ID = {
|
||||
'gstreamer': 1357,
|
||||
'gst-rtsp-server': 1362,
|
||||
'gstreamer-vaapi': 1359,
|
||||
'gstreamer-sharp': 1358,
|
||||
'gst-python': 1355,
|
||||
'gst-plugins-ugly': 1354,
|
||||
'gst-plugins-good': 1353,
|
||||
'gst-plugins-base': 1352,
|
||||
'gst-plugins-bad': 1351,
|
||||
'gst-omx': 1350,
|
||||
'gst-libav': 1349,
|
||||
'gst-integration-testsuites': 1348,
|
||||
'gst-examples': 1347,
|
||||
'gst-editing-services': 1346,
|
||||
'gst-docs': 1345,
|
||||
'gst-devtools': 1344,
|
||||
'gst-ci': 1343,
|
||||
'gst-build': 1342,
|
||||
}
|
||||
|
||||
# We do not want to deal with LFS
|
||||
os.environ["GIT_LFS_SKIP_SMUDGE"] = "1"
|
||||
|
||||
|
||||
log_depth = [] # type: T.List[str]
|
||||
|
||||
|
||||
@contextmanager
|
||||
def nested(name=''):
|
||||
global log_depth
|
||||
log_depth.append(name)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
log_depth.pop()
|
||||
|
||||
|
||||
def bold(text: str):
|
||||
return f"\033[1m{text}\033[0m"
|
||||
|
||||
|
||||
def green(text: str):
|
||||
return f"\033[1;32m{text}\033[0m"
|
||||
|
||||
|
||||
def red(text: str):
|
||||
return f"\033[1;31m{text}\033[0m"
|
||||
|
||||
|
||||
def yellow(text: str):
|
||||
return f"\033[1;33m{text}\033[0m"
|
||||
|
||||
|
||||
def fprint(msg, nested=True):
|
||||
if log_depth:
|
||||
prepend = log_depth[-1] + ' | ' if nested else ''
|
||||
else:
|
||||
prepend = ''
|
||||
|
||||
print(prepend + msg, end="")
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
class GstMRMover:
|
||||
def __init__(self):
|
||||
|
||||
self.modules = []
|
||||
self.gitlab = None
|
||||
self.config_files = []
|
||||
self.gl = None
|
||||
self.mr = None
|
||||
self.mr_url = None
|
||||
self.all_projects = []
|
||||
self.skipped_branches = []
|
||||
self.git_rename_limit = None
|
||||
self.skip_on_failure = None
|
||||
self.dry_run = False
|
||||
|
||||
def connect(self):
|
||||
fprint("Logging into gitlab...")
|
||||
|
||||
if self.gitlab:
|
||||
gl = gitlab.Gitlab.from_config(self.gitlab, self.config_files)
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
return gl
|
||||
|
||||
gitlab_api_token = os.environ.get('GITLAB_API_TOKEN')
|
||||
if gitlab_api_token:
|
||||
gl = gitlab.Gitlab(URL, private_token=gitlab_api_token)
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
return gl
|
||||
|
||||
session = requests.Session()
|
||||
sign_in_page = session.get(SIGN_IN_URL).content.decode()
|
||||
for line in sign_in_page.split('\n'):
|
||||
m = re.search('name="authenticity_token" value="([^"]+)"', line)
|
||||
if m:
|
||||
break
|
||||
|
||||
token = None
|
||||
if m:
|
||||
token = m.group(1)
|
||||
|
||||
if not token:
|
||||
fprint(f"{red('Unable to find the authenticity token')}\n")
|
||||
sys.exit(1)
|
||||
|
||||
for data, url in [
|
||||
({'user[login]': 'login_or_email',
|
||||
'user[password]': 'SECRET',
|
||||
'authenticity_token': token}, LOGIN_URL),
|
||||
({'username': 'login_or_email',
|
||||
'password': 'SECRET',
|
||||
'authenticity_token': token}, LOGIN_URL_LDAP)]:
|
||||
|
||||
r = session.post(url, data=data)
|
||||
if r.status_code != 200:
|
||||
continue
|
||||
|
||||
try:
|
||||
gl = gitlab.Gitlab(URL, api_version=4, session=session)
|
||||
gl.auth()
|
||||
except gitlab.exceptions.GitlabAuthenticationError as e:
|
||||
continue
|
||||
return gl
|
||||
|
||||
sys.exit(bold(f"{red('FAILED')}.\n\nPlease go to:\n\n"
|
||||
' https://gitlab.freedesktop.org/-/profile/personal_access_tokens\n\n'
|
||||
f'and generate a token {bold("with read/write access to all but the registry")},'
|
||||
' then set it in the "GITLAB_API_TOKEN" environment variable:"'
|
||||
f'\n\n $ GITLAB_API_TOKEN=<your token> {" ".join(sys.argv)}\n'))
|
||||
|
||||
def git(self, *args, can_fail=False, interaction_message=None, call=False, revert_operation=None):
|
||||
cwd = ROOT_DIR
|
||||
retry = True
|
||||
while retry:
|
||||
retry = False
|
||||
try:
|
||||
if not call:
|
||||
try:
|
||||
return subprocess.check_output(["git"] + list(args), cwd=cwd,
|
||||
stdin=subprocess.DEVNULL,
|
||||
stderr=subprocess.STDOUT).decode()
|
||||
except subprocess.CalledProcessError:
|
||||
if not can_fail:
|
||||
fprint(
|
||||
f"\n\n{bold(red('ERROR'))}: `git {' '.join(args)}` failed" + "\n", nested=False)
|
||||
raise
|
||||
else:
|
||||
subprocess.call(["git"] + list(args), cwd=cwd)
|
||||
return "All good"
|
||||
except Exception as e:
|
||||
if interaction_message:
|
||||
if self.skip_on_failure:
|
||||
return "SKIP"
|
||||
output = getattr(e, "output", b"")
|
||||
if output is not None:
|
||||
out = output.decode()
|
||||
else:
|
||||
out = "????"
|
||||
fprint(f"\n```"
|
||||
f"\n{out}\n"
|
||||
f"Entering a shell in {cwd} to fix:\n\n"
|
||||
f" {bold(interaction_message)}\n\n"
|
||||
f"You should then exit with the following codes:\n\n"
|
||||
f" - {bold('`exit 0`')}: once you have fixed the problem and we can keep moving the merge request\n"
|
||||
f" - {bold('`exit 1`')}: {bold('retry')}: once you have left the repo in a state where the operation should be retried\n"
|
||||
f" - {bold('`exit 2`')}: to skip that merge request\n"
|
||||
f" - {bold('`exit 3`')}: stop the script and abandon moving your MRs\n"
|
||||
"\n```\n", nested=False)
|
||||
try:
|
||||
if os.name == 'nt':
|
||||
shell = os.environ.get(
|
||||
"COMSPEC", r"C:\WINDOWS\system32\cmd.exe")
|
||||
else:
|
||||
shell = os.environ.get(
|
||||
"SHELL", os.path.realpath("/bin/sh"))
|
||||
subprocess.check_call(shell, cwd=cwd)
|
||||
except subprocess.CalledProcessError as e:
|
||||
if e.returncode == 1:
|
||||
retry = True
|
||||
continue
|
||||
elif e.returncode == 2:
|
||||
if revert_operation:
|
||||
self.git(*revert_operation, can_fail=True)
|
||||
return "SKIP"
|
||||
elif e.returncode == 3:
|
||||
if revert_operation:
|
||||
self.git(*revert_operation, can_fail=True)
|
||||
sys.exit(3)
|
||||
except Exception:
|
||||
# Result of subshell does not really matter
|
||||
pass
|
||||
|
||||
return "User fixed it"
|
||||
|
||||
if can_fail:
|
||||
return "Failed but we do not care"
|
||||
|
||||
raise e
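    # Note on git()'s return protocol (describing the code above, not adding
    # behaviour): "SKIP" tells the caller to give up on the current merge
    # request; "All good", "User fixed it" and "Failed but we do not care"
    # all mean the caller may keep going.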
|
||||
|
||||
def cleanup_args(self):
|
||||
if self.mr_url:
|
||||
self.modules.append(GST_PROJECTS[0])
|
||||
(namespace, module, _, _, mr) = os.path.normpath(urlparse(self.mr_url).path).split('/')[1:]
|
||||
self.modules.append(module)
|
||||
self.mr = int(mr)
|
||||
elif not self.modules:
|
||||
if self.mr:
|
||||
sys.exit(f"{red(f'Merge request #{self.mr} specified without module')}\n\n"
|
||||
f"{bold(' -> Use `--module` to specify which module the MR is from.')}")
|
||||
|
||||
self.modules = GST_PROJECTS
|
||||
else:
|
||||
VALID_PROJECTS = GST_PROJECTS[1:]
|
||||
for m in self.modules:
|
||||
if m not in VALID_PROJECTS:
|
||||
projects = '\n- '.join(VALID_PROJECTS)
|
||||
sys.exit(
|
||||
f"{red(f'Unknown module {m}')}\nModules are:\n- {projects}")
|
||||
if self.mr and len(self.modules) > 1:
|
||||
sys.exit(f"{red(f'Merge request #{self.mr} specified but several modules were specified')}\n\n"
|
||||
f"{bold(' -> Use `--module` only once to specify a merge request.')}")
|
||||
self.modules.append(GST_PROJECTS[0])
|
||||
|
||||
def run(self):
|
||||
self.cleanup_args()
|
||||
self.gl = self.connect()
|
||||
self.gl.auth()
|
||||
|
||||
# Skip pre-commit hooks when migrating. Some users may have a
|
||||
# different version of gnu indent and that can lead to cherry-pick
|
||||
# failing.
|
||||
os.environ["GST_DISABLE_PRE_COMMIT_HOOKS"] = "1"
|
||||
|
||||
try:
|
||||
prevbranch = self.git(
|
||||
"rev-parse", "--abbrev-ref", "HEAD", can_fail=True).strip()
|
||||
except Exception:
|
||||
fprint(bold(yellow("Not on a branch?\n")), nested=False)
|
||||
prevbranch = None
|
||||
|
||||
try:
|
||||
self.setup_repo()
|
||||
|
||||
from_projects, to_project = self.fetch_projects()
|
||||
|
||||
with nested(' '):
|
||||
self.move_mrs(from_projects, to_project)
|
||||
finally:
|
||||
if self.git_rename_limit is not None:
|
||||
self.git("config", "merge.renameLimit",
|
||||
str(self.git_rename_limit))
|
||||
if prevbranch:
|
||||
fprint(f'Back to {prevbranch}\n')
|
||||
self.git("checkout", prevbranch)
|
||||
|
||||
def fetch_projects(self):
|
||||
fprint("Fetching projects... ")
|
||||
self.all_projects = [proj for proj in self.gl.projects.list(
|
||||
membership=1, all=True) if proj.name in self.modules]
|
||||
|
||||
try:
|
||||
self.user_project, = [p for p in self.all_projects
|
||||
if p.namespace['path'] == self.gl.user.username
|
||||
and p.name == MONOREPO_NAME]
|
||||
except ValueError:
|
||||
fprint(
|
||||
f"{red(f'ERROR')}\n\nCould not find repository {self.gl.user.name}/{MONOREPO_NAME}")
|
||||
fprint(f"{red(f'Go to https://gitlab.freedesktop.org/gstreamer/gstreamer/ and create a fork so we can move your Merge requests.')}")
|
||||
sys.exit(1)
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
|
||||
from_projects = []
|
||||
user_projects_name = [proj.name for proj in self.all_projects if proj.namespace['path']
|
||||
== self.gl.user.username and proj.name in GST_PROJECTS]
|
||||
for project, id in GST_PROJECTS_ID.items():
|
||||
if project not in user_projects_name or project == 'gstreamer':
|
||||
continue
|
||||
|
||||
projects = [p for p in self.all_projects if p.id == id]
|
||||
if not projects:
|
||||
upstream_project = self.gl.projects.get(id)
|
||||
else:
|
||||
upstream_project, = projects
|
||||
assert project
|
||||
|
||||
from_projects.append(upstream_project)
|
||||
|
||||
fprint(f"\nMoving MRs from:\n")
|
||||
fprint(f"----------------\n")
|
||||
for p in from_projects:
|
||||
fprint(f" - {bold(p.path_with_namespace)}\n")
|
||||
|
||||
to_project = self.gl.projects.get(GST_PROJECTS_ID['gstreamer'])
|
||||
fprint(f"To: {bold(to_project.path_with_namespace)}\n\n")
|
||||
|
||||
return from_projects, to_project
|
||||
|
||||
def recreate_mr(self, project, to_project, mr):
|
||||
branch = f"{project.name}-{mr.source_branch}"
|
||||
if not self.create_branch_for_mr(branch, project, mr):
|
||||
return None
|
||||
|
||||
description = f"**Copied from {URL}/{project.path_with_namespace}/-/merge_requests/{mr.iid}**\n\n{mr.description}"
|
||||
|
||||
title = mr.title
|
||||
if ':' not in mr.title:
|
||||
title = f"{project.name}: {mr.title}"
|
||||
|
||||
new_mr_dict = {
|
||||
'source_branch': branch,
|
||||
'allow_collaboration': True,
|
||||
'remove_source_branch': True,
|
||||
'target_project_id': to_project.id,
|
||||
'target_branch': MONOREPO_BRANCH,
|
||||
'title': title,
|
||||
'labels': mr.labels,
|
||||
'description': description,
|
||||
}
|
||||
|
||||
try:
|
||||
fprint(f"-> Recreating MR '{bold(mr.title)}'...")
|
||||
if self.dry_run:
|
||||
fprint(f"\nDry info:\n{new_mr_dict}\n")
|
||||
else:
|
||||
new_mr = self.user_project.mergerequests.create(new_mr_dict)
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
except gitlab.exceptions.GitlabCreateError as e:
|
||||
fprint(f"{yellow('SKIPPED')} (An MR already exists)\n", nested=False)
|
||||
return None
|
||||
|
||||
fprint(f"-> Adding discussions from MR '{mr.title}'...")
|
||||
if self.dry_run:
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
return None
|
||||
|
||||
new_mr_url = f"{URL}/{to_project.path_with_namespace}/-/merge_requests/{new_mr.iid}"
|
||||
for issue in mr.closes_issues():
|
||||
obj = {'body': f'Fixing MR moved to: {new_mr_url}'}
|
||||
issue.discussions.create(obj)
|
||||
|
||||
mr_url = f"{URL}/{project.path_with_namespace}/-/merge_requests/{mr.iid}"
|
||||
for discussion in mr.discussions.list():
|
||||
# FIXME notes = [n for n in discussion.attributes['notes'] if n['type'] is not None]
|
||||
notes = [n for n in discussion.attributes['notes']]
|
||||
if not notes:
|
||||
continue
|
||||
|
||||
new_discussion = None
|
||||
for note in notes:
|
||||
note = discussion.notes.get(note['id'])
|
||||
|
||||
note_url = f"{mr_url}#note_{note.id}"
|
||||
when = dateparse.parse(
|
||||
note.created_at).strftime('on %d, %b %Y')
|
||||
body = f"**{note.author['name']} - {PING_SIGN}{note.author['username']} wrote [here]({note_url})** {when}:\n\n"
|
||||
body += note.body
|
||||
|
||||
obj = {
|
||||
'body': body,
|
||||
'type': note.type,
|
||||
'resolvable': note.resolvable,
|
||||
}
|
||||
|
||||
if new_discussion:
|
||||
new_discussion.notes.create(obj)
|
||||
else:
|
||||
new_discussion = new_mr.discussions.create(obj)
|
||||
|
||||
if not note.resolvable or note.resolved:
|
||||
new_discussion.resolved = True
|
||||
new_discussion.save()
|
||||
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
|
||||
print(f"New MR available at: {bold(new_mr_url)}\n")
|
||||
|
||||
return new_mr
|
||||
|
||||
def push_branch(self, branch):
|
||||
fprint(
|
||||
f"-> Pushing branch {branch} to remote {self.gl.user.username}...")
|
||||
if self.git("push", "--no-verify", self.gl.user.username, branch,
|
||||
interaction_message=f"pushing {branch} to {self.gl.user.username} with:\n "
|
||||
f" `$git push {self.gl.user.username} {branch}`") == "SKIP":
|
||||
fprint(yellow("'SKIPPED' (couldn't push)"), nested=False)
|
||||
|
||||
return False
|
||||
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
|
||||
return True
|
||||
|
||||
def create_branch_for_mr(self, branch, project, mr):
|
||||
remote_name = project.name + '-' + self.gl.user.username
|
||||
remote_branch = f"{MONOREPO_REMOTE_NAME}/{MONOREPO_BRANCH}"
|
||||
if self.use_branch_if_exists:
|
||||
try:
|
||||
self.git("checkout", branch)
|
||||
self.git("show", remote_branch + "..", call=True)
|
||||
if self.dry_run:
|
||||
fprint("Dry run... not creating MR")
|
||||
return True
|
||||
cont = input('\n Create MR [y/n]? ')
|
||||
if cont.strip().lower() != 'y':
|
||||
fprint("Cancelled")
|
||||
return False
|
||||
return self.push_branch(branch)
|
||||
except subprocess.CalledProcessError as e:
|
||||
pass
|
||||
|
||||
self.git("remote", "add", remote_name,
|
||||
f"{URL}{self.gl.user.username}/{project.name}.git", can_fail=True)
|
||||
self.git("fetch", remote_name)
|
||||
|
||||
if self.git("checkout", remote_branch, "-b", branch,
|
||||
interaction_message=f"checking out branch with `git checkout {remote_branch} -b {branch}`") == "SKIP":
|
||||
fprint(
|
||||
bold(f"{red('SKIPPED')} (couldn't checkout)\n"), nested=False)
|
||||
return False
|
||||
|
||||
# unset upstream to avoid to push to main (ie push.default = tracking)
|
||||
self.git("branch", branch, "--unset-upstream")
|
||||
|
||||
for commit in reversed([c for c in mr.commits()]):
|
||||
if self.git("cherry-pick", commit.id,
|
||||
interaction_message=f"cherry-picking {commit.id} onto {branch} with:\n "
|
||||
f" `$ git cherry-pick {commit.id}`",
|
||||
revert_operation=["cherry-pick", "--abort"]) == "SKIP":
|
||||
fprint(
|
||||
f"{yellow('SKIPPED')} (couldn't cherry-pick).", nested=False)
|
||||
return False
|
||||
|
||||
self.git("show", remote_branch + "..", call=True)
|
||||
if self.dry_run:
|
||||
fprint("Dry run... not creating MR\n")
|
||||
return True
|
||||
cont = input('\n Create MR [y/n]? ')
|
||||
if cont.strip().lower() != 'y':
|
||||
fprint(f"{red('Cancelled')}\n", nested=False)
|
||||
return False
|
||||
|
||||
return self.push_branch(branch)
|
||||
|
||||
def move_mrs(self, from_projects, to_project):
|
||||
failed_mrs = []
|
||||
found_mr = None
|
||||
for from_project in from_projects:
|
||||
with nested(f'{bold(from_project.path_with_namespace)}'):
|
||||
fprint('Fetching MRs...')
|
||||
mrs = [mr for mr in from_project.mergerequests.list(
|
||||
all=True, author_id=self.gl.user.id) if mr.author['username'] == self.gl.user.username and mr.state == "opened"]
|
||||
if not mrs:
|
||||
fprint(f"{yellow(' None')}\n", nested=False)
|
||||
continue
|
||||
|
||||
fprint(f"{green(' DONE')}\n", nested=False)
|
||||
|
||||
for mr in mrs:
|
||||
if self.mr:
|
||||
if self.mr != mr.iid:
|
||||
continue
|
||||
found_mr = True
|
||||
fprint(
|
||||
f'Moving {mr.source_branch} "{mr.title}": {URL}{from_project.path_with_namespace}/merge_requests/{mr.iid}... ')
|
||||
if mr.source_branch in self.skipped_branches:
|
||||
print(f"{yellow('SKIPPED')} (blacklisted branch)")
|
||||
failed_mrs.append(
|
||||
f"{URL}{from_project.path_with_namespace}/merge_requests/{mr.iid}")
|
||||
continue
|
||||
if self.list_mrs_only:
|
||||
fprint("\n"f"List only: {yellow('SKIPPED')}\n")
|
||||
continue
|
||||
|
||||
with nested(f'{bold(from_project.path_with_namespace)}: {mr.iid}'):
|
||||
new_mr = self.recreate_mr(from_project, to_project, mr)
|
||||
if not new_mr:
|
||||
if not self.dry_run:
|
||||
failed_mrs.append(
|
||||
f"{URL}{from_project.path_with_namespace}/merge_requests/{mr.iid}")
|
||||
else:
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
|
||||
self.close_mr(from_project, to_project, mr, new_mr)
|
||||
|
||||
fprint(
|
||||
f"\n{yellow('DONE')} with {from_project.path_with_namespace}\n\n", nested=False)
|
||||
|
||||
if self.mr and not found_mr:
|
||||
sys.exit(
|
||||
bold(red(f"\n==> Couldn't find MR {self.mr} in {self.modules[0]}\n")))
|
||||
|
||||
for mr in failed_mrs:
|
||||
fprint(f"Didn't move MR: {mr}\n")
|
||||
|
||||
def close_mr(self, project, to_project, mr, new_mr):
|
||||
if new_mr:
|
||||
new_mr_url = f"{URL}/{to_project.path_with_namespace}/-/merge_requests/{new_mr.iid}"
|
||||
else:
|
||||
new_mr_url = None
|
||||
mr_url = f"{URL}/{project.path_with_namespace}/-/merge_requests/{mr.iid}"
|
||||
cont = input(f'\n Close old MR {mr_url} "{bold(mr.title)}" ? [y/n]')
|
||||
if cont.strip().lower() != 'y':
|
||||
fprint(f"{yellow('Not closing old MR')}\n")
|
||||
else:
|
||||
obj = None
|
||||
if new_mr_url:
|
||||
obj = {'body': f"Moved to: {new_mr_url}"}
|
||||
else:
|
||||
ret = input(
|
||||
f"Write a comment to add while closing MR {mr.iid} '{bold(mr.title)}':\n\n").strip()
|
||||
if ret:
|
||||
obj = {'body': ret}
|
||||
|
||||
if self.dry_run:
|
||||
fprint(f"{bold('Dry run, not closing')}\n", nested=False)
|
||||
else:
|
||||
if obj:
|
||||
mr.discussions.create(obj)
|
||||
mr.state_event = 'close'
|
||||
mr.save()
|
||||
fprint(
|
||||
f'Old MR {mr_url} "{bold(mr.title)}" {yellow("CLOSED")}\n')
|
||||
|
||||
def setup_repo(self):
|
||||
fprint(f"Setting up '{bold(ROOT_DIR)}'...")
|
||||
|
||||
try:
|
||||
out = self.git("status", "--porcelain")
|
||||
if out:
|
||||
fprint("\n" + red('Git repository is not clean:')
|
||||
+ "\n```\n" + out + "\n```\n")
|
||||
sys.exit(1)
|
||||
|
||||
except Exception as e:
|
||||
sys.exit(
|
||||
f"Git repository {ROOT_DIR} is not clean. Clean it up before running {sys.argv[0]}\n ({e})")
|
||||
|
||||
self.git('remote', 'add', MONOREPO_REMOTE_NAME,
|
||||
MONOREPO_REMOTE, can_fail=True)
|
||||
self.git('fetch', MONOREPO_REMOTE_NAME)
|
||||
|
||||
self.git('remote', 'add', self.gl.user.username,
|
||||
f"git@gitlab.freedesktop.org:{self.gl.user.username}/gstreamer.git", can_fail=True)
|
||||
self.git('fetch', self.gl.user.username,
|
||||
interaction_message=f"Setup your fork of {URL}gstreamer/gstreamer as remote called {self.gl.user.username}")
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
|
||||
try:
|
||||
git_rename_limit = int(self.git("config", "merge.renameLimit"))
|
||||
except subprocess.CalledProcessError:
|
||||
git_rename_limit = 0
|
||||
if int(git_rename_limit) < 999999:
|
||||
self.git_rename_limit = git_rename_limit
|
||||
fprint(
|
||||
"-> Setting git rename limit to 999999 so we can properly cherry-pick between repos\n")
|
||||
self.git("config", "merge.renameLimit", "999999")
|
||||
|
||||
|
||||
def main():
|
||||
mover = GstMRMover()
|
||||
PARSER.parse_args(namespace=mover)
|
||||
mover.run()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
236
scripts/rebase-branch-from-old-module.py
Executable file
236
scripts/rebase-branch-from-old-module.py
Executable file
@ -0,0 +1,236 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from pathlib import Path as P
|
||||
from urllib.parse import urlparse
|
||||
from contextlib import contextmanager
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
import argparse
|
||||
import requests
|
||||
|
||||
import subprocess
|
||||
|
||||
import random
|
||||
import string
|
||||
|
||||
URL = "https://gitlab.freedesktop.org/"
|
||||
PARSER = argparse.ArgumentParser(
|
||||
description="`Rebase` a branch from an old GStreamer module onto the monorepo"
|
||||
)
|
||||
PARSER.add_argument("repo", help="The repo with the old module to use. ie https://gitlab.freedesktop.org/user/gst-plugins-bad.git or /home/me/gst-build/subprojects/gst-plugins-bad")
|
||||
PARSER.add_argument("branch", help="The branch to rebase.")
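# Example invocation (hypothetical repository URL and branch name), matching
# the two positional arguments declared above:
#
#   ./scripts/rebase-branch-from-old-module.py \
#       https://gitlab.freedesktop.org/user/gst-plugins-bad.git my-feature-branch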
|
||||
|
||||
log_depth = [] # type: T.List[str]
|
||||
|
||||
|
||||
@contextmanager
|
||||
def nested(name=''):
|
||||
global log_depth
|
||||
log_depth.append(name)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
log_depth.pop()
|
||||
|
||||
|
||||
def bold(text: str):
|
||||
return f"\033[1m{text}\033[0m"
|
||||
|
||||
|
||||
def green(text: str):
|
||||
return f"\033[1;32m{text}\033[0m"
|
||||
|
||||
|
||||
def red(text: str):
|
||||
return f"\033[1;31m{text}\033[0m"
|
||||
|
||||
|
||||
def yellow(text: str):
|
||||
return f"\033[1;33m{text}\033[0m"
|
||||
|
||||
|
||||
def fprint(msg, nested=True):
|
||||
if log_depth:
|
||||
prepend = log_depth[-1] + ' | ' if nested else ''
|
||||
else:
|
||||
prepend = ''
|
||||
|
||||
print(prepend + msg, end="")
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
class GstCherryPicker:
|
||||
def __init__(self):
|
||||
|
||||
self.branch = None
|
||||
self.repo = None
|
||||
self.module = None
|
||||
|
||||
self.git_rename_limit = None
|
||||
|
||||
def check_clean(self):
|
||||
try:
|
||||
out = self.git("status", "--porcelain")
|
||||
if out:
|
||||
fprint("\n" + red('Git repository is not clean:') + "\n```\n" + out + "\n```\n")
|
||||
sys.exit(1)
|
||||
|
||||
except Exception as e:
|
||||
sys.exit(
|
||||
f"Git repository is not clean. Clean it up before running ({e})")
|
||||
|
||||
def run(self):
|
||||
assert self.branch
|
||||
assert self.repo
|
||||
self.check_clean()
|
||||
|
||||
try:
|
||||
git_rename_limit = int(self.git("config", "merge.renameLimit"))
|
||||
except subprocess.CalledProcessError:
|
||||
git_rename_limit = 0
|
||||
if int(git_rename_limit) < 999999:
|
||||
self.git_rename_limit = git_rename_limit
|
||||
fprint("-> Setting git rename limit to 999999 so we can properly cherry-pick between repos")
|
||||
self.git("config", "merge.renameLimit", "999999")
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
|
||||
try:
|
||||
self.rebase()
|
||||
finally:
|
||||
if self.git_rename_limit is not None:
|
||||
self.git("config", "merge.renameLimit", str(self.git_rename_limit))
|
||||
|
||||
def rebase(self):
|
||||
repo = urlparse(self.repo)
|
||||
|
||||
repo_path = P(repo.path)
|
||||
self.module = module = repo_path.stem
|
||||
remote_name = f"{module}-{repo_path.parent.name}"
|
||||
fprint('Adding remotes...')
|
||||
self.git("remote", "add", remote_name, self.repo, can_fail=True)
|
||||
self.git("remote", "add", module, f"{URL}gstreamer/{module}.git",
|
||||
can_fail=True)
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
|
||||
fprint(f'Fetching {remote_name}...')
|
||||
self.git("fetch", remote_name,
|
||||
interaction_message=f"fetching {remote_name} with:\n"
|
||||
f" `$ git fetch {remote_name}`")
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
|
||||
fprint(f'Fetching {module}...')
|
||||
self.git("fetch", module,
|
||||
interaction_message=f"fetching {module} with:\n"
|
||||
f" `$ git fetch {module}`")
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
|
||||
prevbranch = self.git("rev-parse", "--abbrev-ref", "HEAD").strip()
|
||||
tmpbranchname = f"{remote_name}_{self.branch}"
|
||||
fprint(f'Checking out branch {remote_name}/{self.branch} as {tmpbranchname}\n')
|
||||
try:
|
||||
self.git("checkout", f"{remote_name}/{self.branch}", "-b", tmpbranchname)
|
||||
self.git("rebase", f"{module}/master",
|
||||
interaction_message=f"Failed rebasing {remote_name}/{self.branch} on {module}/master with:\n"
|
||||
f" `$ git rebase {module}/master`")
|
||||
ret = self.cherry_pick(tmpbranchname)
|
||||
except Exception as e:
|
||||
self.git("rebase", "--abort", can_fail=True)
|
||||
self.git("checkout", prevbranch)
|
||||
self.git("branch", "-D", tmpbranchname)
|
||||
raise
|
||||
if ret:
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
else:
|
||||
self.git("checkout", prevbranch)
|
||||
self.git("branch", "-D", tmpbranchname)
|
||||
fprint(f"{red(' ERROR')}\n", nested=False)
|
||||
|
||||
def cherry_pick(self, branch):
|
||||
shas = self.git('log', '--format=format:%H', f'{self.module}/master..').strip()
|
||||
fprint(f'Resetting on origin/main')
|
||||
self.git("reset", "--hard", "origin/main")
|
||||
fprint(f"{green(' OK')}\n", nested=False)
|
||||
|
||||
for sha in reversed(shas.split()):
|
||||
fprint(f' - Cherry picking: {bold(sha)}\n')
|
||||
try:
|
||||
self.git("cherry-pick", sha,
|
||||
interaction_message=f"cherry-picking {sha} onto {branch} with:\n "
|
||||
f" `$ git cherry-pick {sha}`",
|
||||
revert_operation=["cherry-pick", "--abort"])
|
||||
except Exception as e:
|
||||
fprint(f' - Cherry picking failed: {bold(sha)}\n')
|
||||
return False
|
||||
return True
|
||||
|
||||
def git(self, *args, can_fail=False, interaction_message=None, call=False, revert_operation=None):
|
||||
retry = True
|
||||
while retry:
|
||||
retry = False
|
||||
try:
|
||||
if not call:
|
||||
try:
|
||||
return subprocess.check_output(["git"] + list(args),
|
||||
stdin=subprocess.DEVNULL,
|
||||
stderr=subprocess.STDOUT).decode()
|
||||
except Exception as e:
|
||||
if not can_fail:
|
||||
fprint(f"\n\n{bold(red('ERROR'))}: `git {' '.join(args)}` failed" + "\n", nested=False)
|
||||
raise
|
||||
else:
|
||||
subprocess.call(["git"] + list(args))
|
||||
return "All good"
|
||||
except Exception as e:
|
||||
if interaction_message:
|
||||
output = getattr(e, "output", b"")
|
||||
if output is not None:
|
||||
out = output.decode()
|
||||
else:
|
||||
out = "????"
|
||||
fprint(f"\n```"
|
||||
f"\n{out}\n"
|
||||
f"Entering a shell to fix:\n\n"
|
||||
f" {bold(interaction_message)}\n\n"
|
||||
f"You should then exit with the following codes:\n\n"
|
||||
f" - {bold('`exit 0`')}: once you have fixed the problem and we can keep going\n"
|
||||
f" - {bold('`exit 1`')}: {bold('retry')}: once you have left the repo in a state where cherry-picking the commit should be retried\n"
|
||||
f" - {bold('`exit 2`')}: stop the script and abandon rebasing your branch\n"
|
||||
"\n```\n", nested=False)
|
||||
try:
|
||||
if os.name == 'nt':
|
||||
shell = os.environ.get(
|
||||
"COMSPEC", r"C:\WINDOWS\system32\cmd.exe")
|
||||
else:
|
||||
shell = os.environ.get(
|
||||
"SHELL", os.path.realpath("/bin/sh"))
|
||||
subprocess.check_call(shell)
|
||||
except subprocess.CalledProcessError as e:
|
||||
if e.returncode == 1:
|
||||
retry = True
|
||||
continue
|
||||
elif e.returncode == 2:
|
||||
if revert_operation:
|
||||
self.git(*revert_operation, can_fail=True)
|
||||
raise
|
||||
except Exception as e:
|
||||
# Result of subshell does not really matter
|
||||
pass
|
||||
|
||||
return "User fixed it"
|
||||
|
||||
if can_fail:
|
||||
return "Failed but we do not care"
|
||||
|
||||
raise e
|
||||
|
||||
|
||||
def main():
|
||||
picker = GstCherryPicker()
|
||||
PARSER.parse_args(namespace=picker)
|
||||
picker.run()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
3
subprojects/gst-devtools/.gitignore
vendored
Normal file
3
subprojects/gst-devtools/.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
*.bak
|
||||
build*
|
||||
mesonbuild*
|
20644
subprojects/gst-devtools/ChangeLog
Normal file
20644
subprojects/gst-devtools/ChangeLog
Normal file
File diff suppressed because it is too large
1898
subprojects/gst-devtools/NEWS
Normal file
1898
subprojects/gst-devtools/NEWS
Normal file
File diff suppressed because it is too large
100
subprojects/gst-devtools/RELEASE
Normal file
100
subprojects/gst-devtools/RELEASE
Normal file
@ -0,0 +1,100 @@
|
||||
This is GStreamer gst-devtools 1.20.0.
|
||||
|
||||
The GStreamer team is thrilled to announce a new major feature release
|
||||
of your favourite cross-platform multimedia framework!
|
||||
|
||||
As always, this release is again packed with new features, bug fixes and
|
||||
other improvements.
|
||||
|
||||
The 1.20 release series adds new features on top of the 1.18 series and is
|
||||
part of the API and ABI-stable 1.x release series.
|
||||
|
||||
Full release notes can be found at:
|
||||
|
||||
https://gstreamer.freedesktop.org/releases/1.20/
|
||||
|
||||
Binaries for Android, iOS, Mac OS X and Windows will usually be provided
|
||||
shortly after the release.
|
||||
|
||||
This module will not be very useful by itself and should be used in conjunction
|
||||
with other GStreamer modules for a complete multimedia experience.
|
||||
|
||||
- gstreamer: provides the core GStreamer libraries and some generic plugins
|
||||
|
||||
- gst-plugins-base: a basic set of well-supported plugins and additional
|
||||
media-specific GStreamer helper libraries for audio,
|
||||
video, rtsp, rtp, tags, OpenGL, etc.
|
||||
|
||||
- gst-plugins-good: a set of well-supported plugins under our preferred
|
||||
license
|
||||
|
||||
- gst-plugins-ugly: a set of well-supported plugins which might pose
|
||||
problems for distributors
|
||||
|
||||
- gst-plugins-bad: a set of plugins of varying quality that have not made
|
||||
their way into one of core/base/good/ugly yet, for one
|
||||
reason or another. Many of these are production quality
|
||||
elements, but may still be missing documentation or unit
|
||||
tests; others haven't passed the rigorous quality testing
|
||||
we expect yet.
|
||||
|
||||
- gst-libav: a set of codecs plugins based on the ffmpeg library. This is
|
||||
where you can find audio and video decoders and encoders
|
||||
for a wide variety of formats including H.264, AAC, etc.
|
||||
|
||||
- gstreamer-vaapi: hardware-accelerated video decoding and encoding using
|
||||
VA-API on Linux. Primarily for Intel graphics hardware.
|
||||
|
||||
- gst-omx: hardware-accelerated video decoding and encoding, primarily for
|
||||
embedded Linux systems that provide an OpenMax
|
||||
implementation layer such as the Raspberry Pi.
|
||||
|
||||
- gst-rtsp-server: library to serve files or streaming pipelines via RTSP
|
||||
|
||||
- gst-editing-services: library and plugins for non-linear editing
|
||||
|
||||
==== Download ====
|
||||
|
||||
You can find source releases of gstreamer in the download
|
||||
directory: https://gstreamer.freedesktop.org/src/gstreamer/
|
||||
|
||||
The git repository and details how to clone it can be found at
|
||||
https://gitlab.freedesktop.org/gstreamer/gstreamer/
|
||||
|
||||
==== Homepage ====
|
||||
|
||||
The project's website is https://gstreamer.freedesktop.org/
|
||||
|
||||
==== Support and Bugs ====
|
||||
|
||||
We track bugs and feature requests in GitLab:
|
||||
|
||||
https://gitlab.freedesktop.org/gstreamer/gstreamer/
|
||||
|
||||
Please submit patches via GitLab as well, in form of Merge Requests. See
|
||||
|
||||
https://gstreamer.freedesktop.org/documentation/contribute/
|
||||
|
||||
for more details.
|
||||
|
||||
For help and support, please subscribe to and send questions to the
|
||||
gstreamer-devel mailing list (see below for details).
|
||||
|
||||
There is also a #gstreamer IRC channel on the OFTC IRC network.
|
||||
|
||||
Please do not submit support requests in GitLab, we only use it
|
||||
for bug tracking and merge requests review.
|
||||
|
||||
==== Developers ====
|
||||
|
||||
The GStreamer source code repository can be found on GitLab on freedesktop.org:
|
||||
|
||||
https://gitlab.freedesktop.org/gstreamer/gstreamer/
|
||||
|
||||
and can also be cloned from there and this is also where you can submit
|
||||
Merge Requests or file issues for bugs or feature requests.
|
||||
|
||||
Interested developers of the core library, plugins, and applications should
|
||||
subscribe to the gstreamer-devel list:
|
||||
|
||||
https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel
|
14
subprojects/gst-devtools/debug-viewer/.gitignore
vendored
Normal file
14
subprojects/gst-devtools/debug-viewer/.gitignore
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
|
||||
*.pyc
|
||||
*.pyo
|
||||
|
||||
*.glade.bak
|
||||
*.gladep
|
||||
*.gladep.bak
|
||||
|
||||
/build
|
||||
/dist
|
||||
/MANIFEST
|
||||
|
||||
po/*.pot
|
||||
po/mo
|
@ -0,0 +1,74 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Development Utilities
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Development Utilities Common Data module."""
|
||||
|
||||
import gi
|
||||
|
||||
from gi.repository import GObject
|
||||
|
||||
|
||||
class Dispatcher (object):
|
||||
|
||||
def __call__(self, iterator):
|
||||
|
||||
raise NotImplementedError("derived classes must override this method")
|
||||
|
||||
def cancel(self):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class DefaultDispatcher (Dispatcher):
|
||||
|
||||
def __call__(self, iterator):
|
||||
|
||||
for x in iterator:
|
||||
pass
|
||||
|
||||
|
||||
class GSourceDispatcher (Dispatcher):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
Dispatcher.__init__(self)
|
||||
|
||||
self.source_id = None
|
||||
|
||||
def __call__(self, iterator):
|
||||
|
||||
if self.source_id is not None:
|
||||
GObject.source_remove(self.source_id)
|
||||
|
||||
def iteration():
|
||||
r = iterator.__next__()
|
||||
if not r:
|
||||
self.source_id = None
|
||||
return r
|
||||
|
||||
self.source_id = GObject.idle_add(
|
||||
iteration, priority=GObject.PRIORITY_LOW)
|
||||
|
||||
def cancel(self):
|
||||
|
||||
if self.source_id is None:
|
||||
return
|
||||
|
||||
GObject.source_remove(self.source_id)
|
||||
self.source_id = None
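# Minimal usage sketch (assumed; not part of the original module): a dispatcher
# is called with an iterator that yields truthy values while there is more work
# to do. GSourceDispatcher spreads those steps over GLib idle callbacks instead
# of blocking the main loop.
def _dispatcher_example(lines):
    def work():
        for line in lines:
            print(line)   # stand-in for real per-item processing
            yield True    # keep the idle callback scheduled
        yield False       # a falsy value stops the dispatcher

    GSourceDispatcher()(work())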
|
@ -0,0 +1,528 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Development Utilities
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Development Utilities Common GUI module."""
|
||||
|
||||
import os
|
||||
|
||||
import logging
|
||||
|
||||
import gi
|
||||
|
||||
gi.require_version('Gtk', '3.0')
|
||||
from gi.repository import GObject
|
||||
from gi.repository import Gtk
|
||||
from gi.repository import Gdk
|
||||
from gi.types import GObjectMeta
|
||||
|
||||
import GstDebugViewer
|
||||
from GstDebugViewer.Common import utils
|
||||
from .generictreemodel import GenericTreeModel
|
||||
|
||||
|
||||
def widget_add_popup_menu(widget, menu, button=3):
|
||||
|
||||
def popup_callback(widget, event):
|
||||
|
||||
if event.button == button:
|
||||
menu.popup(
|
||||
None, None, None, None, event.button, event.get_time())
|
||||
return False
|
||||
|
||||
widget.connect("button-press-event", popup_callback)
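# Assumed usage sketch (widget and label are placeholders): attach a context
# menu that pops up on a right click (button 3) of the given tree view.
def _popup_menu_example(tree_view):
    menu = Gtk.Menu()
    item = Gtk.MenuItem(label="Copy")
    menu.append(item)
    item.show()
    widget_add_popup_menu(tree_view, menu)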
|
||||
|
||||
|
||||
class Actions (dict):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
dict.__init__(self)
|
||||
|
||||
self.groups = {}
|
||||
|
||||
def __getattr__(self, name):
|
||||
|
||||
try:
|
||||
return self[name]
|
||||
except KeyError:
|
||||
if "_" in name:
|
||||
try:
|
||||
return self[name.replace("_", "-")]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
raise AttributeError("no action with name %r" % (name,))
|
||||
|
||||
def add_group(self, group):
|
||||
|
||||
name = group.props.name
|
||||
if name in self.groups:
|
||||
raise ValueError("already have a group named %s" % (name,))
|
||||
self.groups[name] = group
|
||||
for action in group.list_actions():
|
||||
self[action.props.name] = action
|
||||
|
||||
|
||||
class Widgets (dict):
|
||||
|
||||
def __init__(self, builder):
|
||||
|
||||
widgets = (obj for obj in builder.get_objects()
|
||||
if isinstance(obj, Gtk.Buildable))
|
||||
# Gtk.Widget.get_name() shadows out the GtkBuildable interface method
|
||||
# of the same name, hence calling the unbound interface method here:
|
||||
items = ((Gtk.Buildable.get_name(w), w,) for w in widgets)
|
||||
|
||||
dict.__init__(self, items)
|
||||
|
||||
def __getattr__(self, name):
|
||||
|
||||
try:
|
||||
return self[name]
|
||||
except KeyError:
|
||||
if "_" in name:
|
||||
try:
|
||||
return self[name.replace("_", "-")]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
raise AttributeError("no widget with name %r" % (name,))
|
||||
|
||||
|
||||
class WidgetFactory (object):
|
||||
|
||||
def __init__(self, directory):
|
||||
|
||||
self.directory = directory
|
||||
|
||||
def get_builder(self, filename):
|
||||
|
||||
builder_filename = os.path.join(self.directory, filename)
|
||||
|
||||
builder = Gtk.Builder()
|
||||
builder.set_translation_domain(GstDebugViewer.GETTEXT_DOMAIN)
|
||||
builder.add_from_file(builder_filename)
|
||||
|
||||
return builder
|
||||
|
||||
def make(self, filename, widget_name, autoconnect=None):
|
||||
|
||||
builder = self.get_builder(filename)
|
||||
|
||||
if autoconnect is not None:
|
||||
builder.connect_signals(autoconnect)
|
||||
|
||||
return Widgets(builder)
|
||||
|
||||
def make_one(self, filename, widget_name):
|
||||
|
||||
builder = self.get_builder(filename)
|
||||
|
||||
return builder.get_object(widget_name)
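# Assumed usage sketch (file and object names are placeholders): WidgetFactory
# loads a GtkBuilder file from the data directory and hands back either all
# objects (as a Widgets mapping) or a single named one.
def _widget_factory_example(data_dir):
    factory = WidgetFactory(data_dir)
    widgets = factory.make("main-window.ui", "main_window")
    return widgets.main_window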
|
||||
|
||||
|
||||
class UIFactory (object):
|
||||
|
||||
def __init__(self, ui_filename, actions=None):
|
||||
|
||||
self.filename = ui_filename
|
||||
if actions:
|
||||
self.action_groups = actions.groups
|
||||
else:
|
||||
self.action_groups = ()
|
||||
|
||||
def make(self, extra_actions=None):
|
||||
|
||||
ui_manager = Gtk.UIManager()
|
||||
for action_group in list(self.action_groups.values()):
|
||||
ui_manager.insert_action_group(action_group, 0)
|
||||
if extra_actions:
|
||||
for action_group in extra_actions.groups:
|
||||
ui_manager.insert_action_group(action_group, 0)
|
||||
ui_manager.add_ui_from_file(self.filename)
|
||||
ui_manager.ensure_update()
|
||||
|
||||
return ui_manager
|
||||
|
||||
|
||||
class MetaModel (GObjectMeta):
|
||||
|
||||
"""Meta class for easy setup of gtk tree models.
|
||||
|
||||
Looks for a class attribute named `columns' which must be set to a
|
||||
sequence of the form name1, type1, name2, type2, ..., where the
|
||||
names are strings. This metaclass adds the following attributes
|
||||
to created classes:
|
||||
|
||||
cls.column_types = (type1, type2, ...)
|
||||
cls.column_ids = (0, 1, ...)
|
||||
cls.name1 = 0
|
||||
cls.name2 = 1
|
||||
...
|
||||
|
||||
Example: A Gtk.ListStore derived model can use
|
||||
|
||||
columns = ("COL_NAME", str, "COL_VALUE", str)
|
||||
|
||||
and use this in __init__:
|
||||
|
||||
GObject.GObject.__init__ (self, *self.column_types)
|
||||
|
||||
Then insert data like this:
|
||||
|
||||
self.set (self.append (),
|
||||
self.COL_NAME, "spam",
|
||||
self.COL_VALUE, "ham")
|
||||
"""
|
||||
|
||||
def __init__(cls, name, bases, dict):
|
||||
|
||||
super(MetaModel, cls).__init__(name, bases, dict)
|
||||
|
||||
spec = tuple(cls.columns)
|
||||
|
||||
column_names = spec[::2]
|
||||
column_types = spec[1::2]
|
||||
column_indices = list(range(len(column_names)))
|
||||
|
||||
for col_index, col_name, in zip(column_indices, column_names):
|
||||
setattr(cls, col_name, col_index)
|
||||
|
||||
cls.column_types = column_types
|
||||
cls.column_ids = tuple(column_indices)
|
||||
|
||||
|
||||
class Manager (object):
|
||||
|
||||
"""GUI Manager base class."""
|
||||
|
||||
@classmethod
|
||||
def iter_item_classes(cls):
|
||||
|
||||
msg = "%s class does not support manager item class access"
|
||||
raise NotImplementedError(msg % (cls.__name__,))
|
||||
|
||||
@classmethod
|
||||
def find_item_class(self, **kw):
|
||||
|
||||
return self.__find_by_attrs(self.iter_item_classes(), kw)
|
||||
|
||||
def iter_items(self):
|
||||
|
||||
msg = "%s object does not support manager item access"
|
||||
raise NotImplementedError(msg % (type(self).__name__,))
|
||||
|
||||
def find_item(self, **kw):
|
||||
|
||||
return self.__find_by_attrs(self.iter_items(), kw)
|
||||
|
||||
@staticmethod
|
||||
def __find_by_attrs(i, kw):
|
||||
|
||||
from operator import attrgetter
|
||||
|
||||
if len(kw) != 1:
|
||||
raise ValueError("need exactly one keyword argument")
|
||||
|
||||
attr, value = list(kw.items())[0]
|
||||
getter = attrgetter(attr)
|
||||
|
||||
for item in i:
|
||||
if getter(item) == value:
|
||||
return item
|
||||
else:
|
||||
raise KeyError("no item such that item.%s == %r" % (attr, value,))
|
||||
|
||||
|
||||
class StateString (object):
|
||||
|
||||
"""Descriptor for binding to StateSection classes."""
|
||||
|
||||
def __init__(self, option, default=None):
|
||||
|
||||
self.option = option
|
||||
self.default = default
|
||||
|
||||
def __get__(self, section, section_class=None):
|
||||
|
||||
import configparser
|
||||
|
||||
if section is None:
|
||||
return self
|
||||
|
||||
try:
|
||||
return self.get(section)
|
||||
except (configparser.NoSectionError,
|
||||
configparser.NoOptionError,):
|
||||
return self.get_default(section)
|
||||
|
||||
def __set__(self, section, value):
|
||||
|
||||
import configparser
|
||||
|
||||
self.set(section, value)
|
||||
|
||||
def get(self, section):
|
||||
|
||||
return section.get(self)
|
||||
|
||||
def get_default(self, section):
|
||||
|
||||
return self.default
|
||||
|
||||
def set(self, section, value):
|
||||
|
||||
if value is None:
|
||||
value = ""
|
||||
|
||||
section.set(self, str(value))
|
||||
|
||||
|
||||
class StateBool (StateString):
|
||||
|
||||
"""Descriptor for binding to StateSection classes."""
|
||||
|
||||
def get(self, section):
|
||||
|
||||
return section.state._parser.getboolean(section._name, self.option)
|
||||
|
||||
|
||||
class StateInt (StateString):
|
||||
|
||||
"""Descriptor for binding to StateSection classes."""
|
||||
|
||||
def get(self, section):
|
||||
|
||||
return section.state._parser.getint(section._name, self.option)
|
||||
|
||||
|
||||
class StateInt4 (StateString):
|
||||
|
||||
"""Descriptor for binding to StateSection classes. This implements storing
|
||||
a tuple of 4 integers."""
|
||||
|
||||
def get(self, section):
|
||||
|
||||
value = StateString.get(self, section)
|
||||
|
||||
try:
|
||||
l = value.split(",")
|
||||
if len(l) != 4:
|
||||
return None
|
||||
else:
|
||||
return tuple((int(v) for v in l))
|
||||
except (AttributeError, TypeError, ValueError,):
|
||||
return None
|
||||
|
||||
def set(self, section, value):
|
||||
|
||||
if value is None:
|
||||
svalue = ""
|
||||
elif len(value) != 4:
|
||||
raise ValueError("value needs to be a 4-sequence, or None")
|
||||
else:
|
||||
svalue = ", ".join((str(v) for v in value))
|
||||
|
||||
return StateString.set(self, section, svalue)
|
||||
|
||||
|
||||
class StateItem (StateString):
|
||||
|
||||
"""Descriptor for binding to StateSection classes. This implements storing
|
||||
a class controlled by a Manager class."""
|
||||
|
||||
def __init__(self, option, manager_class, default=None):
|
||||
|
||||
StateString.__init__(self, option, default=default)
|
||||
|
||||
self.manager = manager_class
|
||||
|
||||
def get(self, section):
|
||||
|
||||
value = StateString.get(self, section)
|
||||
|
||||
if not value:
|
||||
return None
|
||||
|
||||
return self.parse_item(value)
|
||||
|
||||
def set(self, section, value):
|
||||
|
||||
if value is None:
|
||||
svalue = ""
|
||||
else:
|
||||
svalue = value.name
|
||||
|
||||
StateString.set(self, section, svalue)
|
||||
|
||||
def parse_item(self, value):
|
||||
|
||||
name = value.strip()
|
||||
|
||||
try:
|
||||
return self.manager.find_item_class(name=name)
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
|
||||
class StateItemList (StateItem):
|
||||
|
||||
"""Descriptor for binding to StateSection classes. This implements storing
|
||||
an ordered set of Manager items."""
|
||||
|
||||
def get(self, section):
|
||||
|
||||
value = StateString.get(self, section)
|
||||
|
||||
if not value:
|
||||
return []
|
||||
|
||||
classes = []
|
||||
for name in value.split(","):
|
||||
item_class = self.parse_item(name)
|
||||
if item_class is None:
|
||||
continue
|
||||
if item_class not in classes:
|
||||
classes.append(item_class)
|
||||
|
||||
return classes
|
||||
|
||||
def get_default(self, section):
|
||||
|
||||
default = StateItem.get_default(self, section)
|
||||
if default is None:
|
||||
return []
|
||||
else:
|
||||
return default
|
||||
|
||||
def set(self, section, value):
|
||||
|
||||
if value is None:
|
||||
svalue = ""
|
||||
else:
|
||||
svalue = ", ".join((v.name for v in value))
|
||||
|
||||
StateString.set(self, section, svalue)
|
||||
|
||||
|
||||
class StateSection (object):
|
||||
|
||||
_name = None
|
||||
|
||||
def __init__(self, state):
|
||||
|
||||
self.state = state
|
||||
|
||||
if self._name is None:
|
||||
raise NotImplementedError(
|
||||
"subclasses must override the _name attribute")
|
||||
|
||||
def get(self, state_string):
|
||||
|
||||
return self.state._parser.get(self._name, state_string.option)
|
||||
|
||||
def set(self, state_string, value):
|
||||
|
||||
import configparser
|
||||
|
||||
parser = self.state._parser
|
||||
|
||||
try:
|
||||
parser.set(self._name, state_string.option, value)
|
||||
except configparser.NoSectionError:
|
||||
parser.add_section(self._name)
|
||||
parser.set(self._name, state_string.option, value)
|
||||
|
||||
|
||||
class State (object):
|
||||
|
||||
def __init__(self, filename, old_filenames=()):
|
||||
|
||||
import configparser
|
||||
|
||||
self.sections = {}
|
||||
|
||||
self._filename = filename
|
||||
self._parser = configparser.RawConfigParser()
|
||||
success = self._parser.read([filename])
|
||||
if not success:
|
||||
for old_filename in old_filenames:
|
||||
success = self._parser.read([old_filename])
|
||||
if success:
|
||||
break
|
||||
|
||||
def add_section_class(self, section_class):
|
||||
|
||||
self.sections[section_class._name] = section_class(self)
|
||||
|
||||
def save(self):
|
||||
|
||||
with utils.SaveWriteFile(self._filename, "wt") as fp:
|
||||
self._parser.write(fp)
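# Minimal sketch (hypothetical section and option names) of how the descriptors
# above fit together: a StateSection subclass declares its options as class
# attributes, and a State object loads and saves them through the underlying
# RawConfigParser.
class _ExampleSection (StateSection):

    _name = "example"

    geometry = StateInt4("geometry")
    maximized = StateBool("maximized", default=False)


def _state_example(filename):
    state = State(filename)
    state.add_section_class(_ExampleSection)
    section = state.sections["example"]
    section.maximized = True
    state.save()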
|
||||
|
||||
|
||||
class WindowState (object):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
self.logger = logging.getLogger("ui.window-state")
|
||||
|
||||
self.is_maximized = False
|
||||
|
||||
def attach(self, window, state):
|
||||
|
||||
self.window = window
|
||||
self.state = state
|
||||
|
||||
self.window.connect("window-state-event",
|
||||
self.handle_window_state_event)
|
||||
|
||||
geometry = self.state.geometry
|
||||
if geometry:
|
||||
self.window.move(*geometry[:2])
|
||||
self.window.set_default_size(*geometry[2:])
|
||||
|
||||
if self.state.maximized:
|
||||
self.logger.debug("initially maximized")
|
||||
self.window.maximize()
|
||||
|
||||
def detach(self):
|
||||
|
||||
window = self.window
|
||||
|
||||
self.state.maximized = self.is_maximized
|
||||
if not self.is_maximized:
|
||||
position = tuple(window.get_position())
|
||||
size = tuple(window.get_size())
|
||||
self.state.geometry = position + size
|
||||
|
||||
self.window.disconnect_by_func(self.handle_window_state_event)
|
||||
self.window = None
|
||||
|
||||
def handle_window_state_event(self, window, event):
|
||||
|
||||
if not event.changed_mask & Gdk.WindowState.MAXIMIZED:
|
||||
return
|
||||
|
||||
if event.new_window_state & Gdk.WindowState.MAXIMIZED:
|
||||
self.logger.debug("maximized")
|
||||
self.is_maximized = True
|
||||
else:
|
||||
self.logger.debug("unmaximized")
|
||||
self.is_maximized = False
|
@ -0,0 +1,366 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Development Utilities
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Development Utilities Common Main module."""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import traceback
|
||||
from operator import attrgetter
|
||||
import logging
|
||||
import locale
|
||||
import gettext
|
||||
from gettext import gettext as _, ngettext
|
||||
|
||||
import gi
|
||||
|
||||
from gi.repository import GLib
|
||||
from gi.repository import GObject
|
||||
from gi.repository import Gtk
|
||||
|
||||
|
||||
class ExceptionHandler (object):
|
||||
|
||||
exc_types = (Exception,)
|
||||
priority = 50
|
||||
inherit_fork = True
|
||||
|
||||
_handling_exception = False
|
||||
|
||||
def __call__(self, exc_type, exc_value, exc_traceback):
|
||||
|
||||
raise NotImplementedError(
|
||||
"derived classes need to override this method")
|
||||
|
||||
|
||||
class DefaultExceptionHandler (ExceptionHandler):
|
||||
exc_types = (BaseException,)
|
||||
priority = 0
|
||||
inherit_fork = True
|
||||
|
||||
def __init__(self, excepthook):
|
||||
|
||||
ExceptionHandler.__init__(self)
|
||||
|
||||
self.excepthook = excepthook
|
||||
|
||||
def __call__(self, *exc_info):
|
||||
|
||||
return self.excepthook(*exc_info)
|
||||
|
||||
|
||||
class ExitOnInterruptExceptionHandler (ExceptionHandler):
|
||||
|
||||
exc_types = (KeyboardInterrupt,)
|
||||
priority = 100
|
||||
inherit_fork = False
|
||||
|
||||
exit_status = 2
|
||||
|
||||
def __call__(self, *args):
|
||||
|
||||
print("Interrupt caught, exiting.", file=sys.stderr)
|
||||
|
||||
sys.exit(self.exit_status)
|
||||
|
||||
|
||||
class MainLoopWrapper (ExceptionHandler):
|
||||
|
||||
priority = 95
|
||||
inherit_fork = False
|
||||
|
||||
def __init__(self, enter, exit):
|
||||
|
||||
ExceptionHandler.__init__(self)
|
||||
|
||||
self.exc_info = (None,) * 3
|
||||
self.enter = enter
|
||||
self.exit = exit
|
||||
|
||||
def __call__(self, *exc_info):
|
||||
|
||||
self.exc_info = exc_info
|
||||
self.exit()
|
||||
|
||||
def run(self):
|
||||
|
||||
ExceptHookManager.register_handler(self)
|
||||
try:
|
||||
self.enter()
|
||||
finally:
|
||||
ExceptHookManager.unregister_handler(self)
|
||||
|
||||
if self.exc_info != (None,) * 3:
|
||||
# Re-raise unhandled exception that occurred while running the loop.
|
||||
exc_type, exc_value, exc_tb = self.exc_info
|
||||
raise exc_value
|
||||
|
||||
|
||||
class ExceptHookManagerClass (object):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
self._in_forked_child = False
|
||||
|
||||
self.handlers = []
|
||||
|
||||
def setup(self):
|
||||
|
||||
if sys.excepthook == self.__excepthook:
|
||||
raise ValueError("already set up")
|
||||
|
||||
hook = sys.excepthook
|
||||
self.__instrument_excepthook()
|
||||
self.__instrument_fork()
|
||||
self.register_handler(DefaultExceptionHandler(hook))
|
||||
|
||||
def shutdown(self):
|
||||
|
||||
if sys.excepthook != self.__excepthook:
|
||||
raise ValueError("not set up")
|
||||
|
||||
self.__restore_excepthook()
|
||||
self.__restore_fork()
|
||||
|
||||
def __instrument_excepthook(self):
|
||||
|
||||
hook = sys.excepthook
|
||||
self._original_excepthook = hook
|
||||
sys.excepthook = self.__excepthook
|
||||
|
||||
def __restore_excepthook(self):
|
||||
|
||||
sys.excepthook = self._original_excepthook
|
||||
|
||||
def __instrument_fork(self):
|
||||
|
||||
try:
|
||||
fork = os.fork
|
||||
except AttributeError:
|
||||
# System has no fork() system call.
|
||||
self._original_fork = None
|
||||
else:
|
||||
self._original_fork = fork
|
||||
os.fork = self.__fork
|
||||
|
||||
def __restore_fork(self):
|
||||
|
||||
if not hasattr(os, "fork"):
|
||||
return
|
||||
|
||||
os.fork = self._original_fork
|
||||
|
||||
def entered_forked_child(self):
|
||||
|
||||
self._in_forked_child = True
|
||||
|
||||
for handler in tuple(self.handlers):
|
||||
if not handler.inherit_fork:
|
||||
self.handlers.remove(handler)
|
||||
|
||||
def register_handler(self, handler):
|
||||
|
||||
if self._in_forked_child and not handler.inherit_fork:
|
||||
return
|
||||
|
||||
self.handlers.append(handler)
|
||||
|
||||
def unregister_handler(self, handler):
|
||||
|
||||
self.handlers.remove(handler)
|
||||
|
||||
def __fork(self):
|
||||
|
||||
pid = self._original_fork()
|
||||
if pid == 0:
|
||||
# Child process.
|
||||
self.entered_forked_child()
|
||||
return pid
|
||||
|
||||
def __excepthook(self, exc_type, exc_value, exc_traceback):
|
||||
|
||||
for handler in sorted(self.handlers,
|
||||
key=attrgetter("priority"),
|
||||
reverse=True):
|
||||
|
||||
if handler._handling_exception:
|
||||
continue
|
||||
|
||||
for type_ in handler.exc_types:
|
||||
if issubclass(exc_type, type_):
|
||||
break
|
||||
else:
|
||||
continue
|
||||
|
||||
handler._handling_exception = True
|
||||
handler(exc_type, exc_value, exc_traceback)
|
||||
# Not using try...finally on purpose here. If the handler itself
|
||||
# fails with an exception, this prevents recursing into it again.
|
||||
handler._handling_exception = False
|
||||
return
|
||||
|
||||
else:
|
||||
from warnings import warn
|
||||
warn("ExceptHookManager: unhandled %r" % (exc_value,),
|
||||
RuntimeWarning,
|
||||
stacklevel=2)
|
||||
|
||||
|
||||
ExceptHookManager = ExceptHookManagerClass()
|
||||
|
||||
|
||||
class PathsBase (object):
|
||||
|
||||
data_dir = None
|
||||
icon_dir = None
|
||||
locale_dir = None
|
||||
|
||||
@classmethod
|
||||
def setup_installed(cls, data_prefix):
|
||||
"""Set up paths for running from a regular installation."""
|
||||
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def setup_devenv(cls, source_dir):
|
||||
"""Set up paths for running the development environment
|
||||
(i.e. directly from the source dist)."""
|
||||
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def ensure_setup(cls):
|
||||
"""If paths are still not set up, try to set from a fallback."""
|
||||
|
||||
if cls.data_dir is None:
|
||||
source_dir = os.path.dirname(
|
||||
os.path.dirname(os.path.abspath(__file__)))
|
||||
cls.setup_devenv(source_dir)
|
||||
|
||||
def __new__(cls):
|
||||
|
||||
raise RuntimeError("do not create instances of this class -- "
|
||||
"use the class object directly")
|
||||
|
||||
|
||||
class PathsProgramBase (PathsBase):
|
||||
|
||||
program_name = None
|
||||
|
||||
@classmethod
|
||||
def setup_installed(cls, data_prefix):
|
||||
|
||||
if cls.program_name is None:
|
||||
raise NotImplementedError(
|
||||
"derived classes need to set program_name attribute")
|
||||
|
||||
cls.data_dir = os.path.join(data_prefix, cls.program_name)
|
||||
cls.icon_dir = os.path.join(data_prefix, "icons")
|
||||
cls.locale_dir = os.path.join(data_prefix, "locale")
|
||||
|
||||
@classmethod
|
||||
def setup_devenv(cls, source_dir):
|
||||
"""Set up paths for running the development environment
|
||||
(i.e. directly from the source dist)."""
|
||||
|
||||
# This is essential: The GUI module needs to find the .glade file.
|
||||
cls.data_dir = os.path.join(source_dir, "data")
|
||||
|
||||
# The locale data might be missing if "setup.py build" wasn't run.
|
||||
cls.locale_dir = os.path.join(source_dir, "build", "mo")
|
||||
|
||||
# Not setting icon_dir. It is not useful since we don't employ the
|
||||
# needed directory structure in the source dist.
|
||||
|
||||
|
||||
def _init_excepthooks():
|
||||
|
||||
ExceptHookManager.setup()
|
||||
ExceptHookManager.register_handler(ExitOnInterruptExceptionHandler())
|
||||
|
||||
|
||||
def _init_paths(paths):
|
||||
|
||||
paths.ensure_setup()
|
||||
|
||||
|
||||
def _init_locale(gettext_domain=None):
|
||||
|
||||
if Paths.locale_dir and gettext_domain is not None:
|
||||
try:
|
||||
locale.setlocale(locale.LC_ALL, "")
|
||||
except locale.Error as exc:
|
||||
from warnings import warn
|
||||
warn("locale error: %s" % (exc,),
|
||||
RuntimeWarning,
|
||||
stacklevel=2)
|
||||
Paths.locale_dir = None
|
||||
else:
|
||||
gettext.bindtextdomain(gettext_domain, Paths.locale_dir)
|
||||
gettext.textdomain(gettext_domain)
|
||||
gettext.bind_textdomain_codeset(gettext_domain, "UTF-8")
|
||||
|
||||
|
||||
def _init_logging(level):
|
||||
if level == "none":
|
||||
return
|
||||
|
||||
mapping = {"debug": logging.DEBUG,
|
||||
"info": logging.INFO,
|
||||
"warning": logging.WARNING,
|
||||
"error": logging.ERROR,
|
||||
"critical": logging.CRITICAL}
|
||||
logging.basicConfig(level=mapping[level],
|
||||
format='%(asctime)s.%(msecs)03d %(levelname)8s %(name)20s: %(message)s',
|
||||
datefmt='%H:%M:%S')
|
||||
|
||||
logger = logging.getLogger("main")
|
||||
logger.debug("logging at level %s", logging.getLevelName(level))
|
||||
logger.info("using Python %i.%i.%i %s %i", *sys.version_info)
|
||||
|
||||
|
||||
def _init_log_option(parser):
|
||||
choices = ["none", "debug", "info", "warning", "error", "critical"]
|
||||
parser.add_option("--log-level", "-l",
|
||||
type="choice",
|
||||
choices=choices,
|
||||
action="store",
|
||||
dest="log_level",
|
||||
default="none",
|
||||
help=_("Enable logging, possible values: ") + ", ".join(choices))
|
||||
return parser
|
||||
|
||||
|
||||
def main(main_function, option_parser, gettext_domain=None, paths=None):
|
||||
|
||||
# FIXME:
|
||||
global Paths
|
||||
Paths = paths
|
||||
|
||||
_init_excepthooks()
|
||||
_init_paths(paths)
|
||||
_init_locale(gettext_domain)
|
||||
parser = _init_log_option(option_parser)
|
||||
options, args = option_parser.parse_args()
|
||||
_init_logging(options.log_level)
|
||||
|
||||
try:
|
||||
main_function(args)
|
||||
finally:
|
||||
logging.shutdown()
|
@ -0,0 +1,25 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Development Utilities
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Development Utilities Common package."""
|
||||
|
||||
from . import Data
|
||||
from . import GUI
|
||||
from . import Main
|
||||
from . import utils
|
@ -0,0 +1,420 @@
|
||||
# -*- Mode: Python; py-indent-offset: 4 -*-
|
||||
# generictreemodel - GenericTreeModel implementation for pygtk compatibility.
|
||||
# Copyright (C) 2013 Simon Feltman
|
||||
#
|
||||
# generictreemodel.py: GenericTreeModel implementation for pygtk compatibility
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
# System
|
||||
import sys
|
||||
import random
|
||||
import collections
|
||||
import ctypes
|
||||
|
||||
# GObject
|
||||
from gi.repository import GObject
|
||||
from gi.repository import Gtk
|
||||
|
||||
|
||||
class _CTreeIter(ctypes.Structure):
|
||||
_fields_ = [('stamp', ctypes.c_int),
|
||||
('user_data', ctypes.c_void_p),
|
||||
('user_data2', ctypes.c_void_p),
|
||||
('user_data3', ctypes.c_void_p)]
|
||||
|
||||
@classmethod
|
||||
def from_iter(cls, iter):
|
||||
offset = sys.getsizeof(object()) # size of PyObject_HEAD
|
||||
return ctypes.POINTER(cls).from_address(id(iter) + offset)
|
||||
|
||||
|
||||
def _get_user_data_as_pyobject(iter):
|
||||
citer = _CTreeIter.from_iter(iter)
|
||||
return ctypes.cast(citer.contents.user_data, ctypes.py_object).value
|
||||
|
||||
|
||||
def handle_exception(default_return):
|
||||
"""Returns a function which can act as a decorator for wrapping exceptions and
|
||||
returning "default_return" upon an exception being thrown.
|
||||
|
||||
This is used to wrap Gtk.TreeModel "do_" method implementations so we can return
|
||||
a proper value from the override upon an exception occurring with client code
|
||||
implemented by the "on_" methods.
|
||||
"""
|
||||
def decorator(func):
|
||||
def wrapped_func(*args, **kargs):
|
||||
try:
|
||||
return func(*args, **kargs)
|
||||
except BaseException:
|
||||
# Use excepthook directly to avoid any printing to the screen
|
||||
# if someone installed an except hook.
|
||||
sys.excepthook(*sys.exc_info())
|
||||
return default_return
|
||||
return wrapped_func
|
||||
return decorator
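# Hypothetical sketch of the decorator above: the wrapped function reports the
# exception through sys.excepthook and returns the supplied default instead.
def _handle_exception_example():
    @handle_exception(-1)
    def risky():
        raise RuntimeError("boom")
    return risky()  # returns -1; the traceback goes to sys.excepthook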
|
||||
|
||||
|
||||
class GenericTreeModel(GObject.GObject, Gtk.TreeModel):
|
||||
|
||||
"""A base implementation of a Gtk.TreeModel for python.
|
||||
|
||||
The GenericTreeModel eases implementing the Gtk.TreeModel interface in Python.
|
||||
The class can be subclassed to provide a TreeModel implementation which works
|
||||
directly with Python objects instead of iterators.
|
||||
|
||||
All of the on_* methods should be overridden by subclasses to provide the
|
||||
underlying implementation a way to access custom model data. For the purposes of
|
||||
this API, all custom model data supplied or handed back through the overridable
|
||||
API will use the argument names: node, parent, and child in regards to user data
|
||||
python objects.
|
||||
|
||||
The create_tree_iter, set_user_data, invalidate_iters, iter_is_valid methods are
|
||||
available to help manage Gtk.TreeIter objects and their Python object references.
|
||||
|
||||
GenericTreeModel manages a pool of user data nodes that have been used with iters.
|
||||
This pool stores references to user data nodes as dictionary values with the
|
||||
key being the integer id of the data. This id is what the Gtk.TreeIter objects
|
||||
use to reference data in the pool.
|
||||
References will be removed from the pool when the model is deleted or explicitly
|
||||
by using the optional "node" argument to the "row_deleted" method when notifying
|
||||
the model of row deletion.
|
||||
"""
|
||||
|
||||
leak_references = GObject.Property(default=True, type=bool,
|
||||
blurb="If True, strong references to user data attached to iters are "
|
||||
"stored in a dictionary pool (default). Otherwise the user data is "
|
||||
"stored as a raw pointer to a python object without a reference.")
|
||||
|
||||
#
|
||||
# Methods
|
||||
#
|
||||
def __init__(self):
|
||||
"""Initialize. Make sure to call this from derived classes if overridden."""
|
||||
super(GenericTreeModel, self).__init__()
|
||||
self.stamp = 0
|
||||
|
||||
#: Dictionary of (id(user_data): user_data), used when leak-references=True
|
||||
self._held_refs = dict()
|
||||
|
||||
# Set initial stamp
|
||||
self.invalidate_iters()
|
||||
|
||||
def iter_depth_first(self):
|
||||
"""Depth-first iteration of the entire TreeModel yielding the python nodes."""
|
||||
stack = collections.deque([None])
|
||||
while stack:
|
||||
it = stack.popleft()
|
||||
if it is not None:
|
||||
yield self.get_user_data(it)
|
||||
children = [self.iter_nth_child(it, i)
|
||||
for i in range(self.iter_n_children(it))]
|
||||
stack.extendleft(reversed(children))
|
||||
|
||||
def invalidate_iter(self, iter):
|
||||
"""Clear user data and its reference from the iter and this model."""
|
||||
iter.stamp = 0
|
||||
if iter.user_data:
|
||||
if iter.user_data in self._held_refs:
|
||||
del self._held_refs[iter.user_data]
|
||||
iter.user_data = None
|
||||
|
||||
def invalidate_iters(self):
|
||||
"""
|
||||
This method invalidates all TreeIter objects associated with this custom tree model
|
||||
and frees their locally pooled references.
|
||||
"""
|
||||
self.stamp = random.randint(-2147483648, 2147483647)
|
||||
self._held_refs.clear()
|
||||
|
||||
def iter_is_valid(self, iter):
|
||||
"""
|
||||
:Returns:
|
||||
True if the Gtk.TreeIter specified by iter is valid for the custom tree model.
|
||||
"""
|
||||
return iter.stamp == self.stamp
|
||||
|
||||
def get_user_data(self, iter):
|
||||
"""Get the user_data associated with the given TreeIter.
|
||||
|
||||
GenericTreeModel stores arbitrary Python objects mapped to instances of Gtk.TreeIter.
|
||||
This method allows retrieving the Python object held by the given iterator.
|
||||
"""
|
||||
if self.leak_references:
|
||||
return self._held_refs[iter.user_data]
|
||||
else:
|
||||
return _get_user_data_as_pyobject(iter)
|
||||
|
||||
def set_user_data(self, iter, user_data):
|
||||
"""Applies user_data and stamp to the given iter.
|
||||
|
||||
If the models "leak_references" property is set, a reference to the
|
||||
user_data is stored with the model to ensure we don't run into bad
|
||||
memory problems with the TreeIter.
|
||||
"""
|
||||
iter.user_data = id(user_data)
|
||||
|
||||
if user_data is None:
|
||||
self.invalidate_iter(iter)
|
||||
else:
|
||||
iter.stamp = self.stamp
|
||||
if self.leak_references:
|
||||
self._held_refs[iter.user_data] = user_data
|
||||
|
||||
def create_tree_iter(self, user_data):
|
||||
"""Create a Gtk.TreeIter instance with the given user_data specific for this model.
|
||||
|
||||
Use this method to create a Gtk.TreeIter instance instead of directly calling
|
||||
Gtk.TreeIter(); this will ensure proper reference management of the wrapped user_data.
|
||||
"""
|
||||
iter = Gtk.TreeIter()
|
||||
self.set_user_data(iter, user_data)
|
||||
return iter
|
||||
|
||||
def _create_tree_iter(self, data):
|
||||
"""Internal creation of a (bool, TreeIter) pair for returning directly
|
||||
back to the view interfacing with this model."""
|
||||
if data is None:
|
||||
return (False, None)
|
||||
else:
|
||||
it = self.create_tree_iter(data)
|
||||
return (True, it)
|
||||
|
||||
def row_deleted(self, path, node=None):
|
||||
"""Notify the model a row has been deleted.
|
||||
|
||||
Use the node parameter to ensure the user_data reference associated
|
||||
with the path is properly freed by this model.
|
||||
|
||||
:Parameters:
|
||||
path : Gtk.TreePath
|
||||
Path to the row that has been deleted.
|
||||
node : object
|
||||
Python object used as the node returned from "on_get_iter". This is
|
||||
optional but ensures the model will not leak references to this object.
|
||||
"""
|
||||
super(GenericTreeModel, self).row_deleted(path)
|
||||
node_id = id(node)
|
||||
if node_id in self._held_refs:
|
||||
del self._held_refs[node_id]
|
||||
|
||||
#
|
||||
# GtkTreeModel Interface Implementation
|
||||
#
|
||||
@handle_exception(0)
|
||||
def do_get_flags(self):
|
||||
"""Internal method."""
|
||||
return self.on_get_flags()
|
||||
|
||||
@handle_exception(0)
|
||||
def do_get_n_columns(self):
|
||||
"""Internal method."""
|
||||
return self.on_get_n_columns()
|
||||
|
||||
@handle_exception(GObject.TYPE_INVALID)
|
||||
def do_get_column_type(self, index):
|
||||
"""Internal method."""
|
||||
return self.on_get_column_type(index)
|
||||
|
||||
@handle_exception((False, None))
|
||||
def do_get_iter(self, path):
|
||||
"""Internal method."""
|
||||
return self._create_tree_iter(self.on_get_iter(path))
|
||||
|
||||
@handle_exception(False)
|
||||
def do_iter_next(self, iter):
|
||||
"""Internal method."""
|
||||
if iter is None:
|
||||
next_data = self.on_iter_next(None)
|
||||
else:
|
||||
next_data = self.on_iter_next(self.get_user_data(iter))
|
||||
|
||||
self.set_user_data(iter, next_data)
|
||||
return next_data is not None
|
||||
|
||||
@handle_exception(None)
|
||||
def do_get_path(self, iter):
|
||||
"""Internal method."""
|
||||
path = self.on_get_path(self.get_user_data(iter))
|
||||
if path is None:
|
||||
return None
|
||||
else:
|
||||
return Gtk.TreePath(path)
|
||||
|
||||
@handle_exception(None)
|
||||
def do_get_value(self, iter, column):
|
||||
"""Internal method."""
|
||||
return self.on_get_value(self.get_user_data(iter), column)
|
||||
|
||||
@handle_exception((False, None))
|
||||
def do_iter_children(self, parent):
|
||||
"""Internal method."""
|
||||
data = self.get_user_data(parent) if parent else None
|
||||
return self._create_tree_iter(self.on_iter_children(data))
|
||||
|
||||
@handle_exception(False)
|
||||
def do_iter_has_child(self, parent):
|
||||
"""Internal method."""
|
||||
return self.on_iter_has_child(self.get_user_data(parent))
|
||||
|
||||
@handle_exception(0)
|
||||
def do_iter_n_children(self, iter):
|
||||
"""Internal method."""
|
||||
if iter is None:
|
||||
return self.on_iter_n_children(None)
|
||||
return self.on_iter_n_children(self.get_user_data(iter))
|
||||
|
||||
@handle_exception((False, None))
|
||||
def do_iter_nth_child(self, parent, n):
|
||||
"""Internal method."""
|
||||
if parent is None:
|
||||
data = self.on_iter_nth_child(None, n)
|
||||
else:
|
||||
data = self.on_iter_nth_child(self.get_user_data(parent), n)
|
||||
return self._create_tree_iter(data)
|
||||
|
||||
@handle_exception((False, None))
|
||||
def do_iter_parent(self, child):
|
||||
"""Internal method."""
|
||||
return self._create_tree_iter(self.on_iter_parent(self.get_user_data(child)))
|
||||
|
||||
@handle_exception(None)
|
||||
def do_ref_node(self, iter):
|
||||
self.on_ref_node(self.get_user_data(iter))
|
||||
|
||||
@handle_exception(None)
|
||||
def do_unref_node(self, iter):
|
||||
self.on_unref_node(self.get_user_data(iter))
|
||||
|
||||
#
|
||||
# Python Subclass Overridables
|
||||
#
|
||||
def on_get_flags(self):
|
||||
"""Overridable.
|
||||
|
||||
:Returns Gtk.TreeModelFlags:
|
||||
The flags for this model. See: Gtk.TreeModelFlags
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_get_n_columns(self):
|
||||
"""Overridable.
|
||||
|
||||
:Returns:
|
||||
The number of columns for this model.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_get_column_type(self, index):
|
||||
"""Overridable.
|
||||
|
||||
:Returns:
|
||||
The column type for the given index.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_get_iter(self, path):
|
||||
"""Overridable.
|
||||
|
||||
:Returns:
|
||||
A python object (node) for the given TreePath.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_iter_next(self, node):
|
||||
"""Overridable.
|
||||
|
||||
:Parameters:
|
||||
node : object
|
||||
Node at current level.
|
||||
|
||||
:Returns:
|
||||
A python object (node) following the given node at the current level.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_get_path(self, node):
|
||||
"""Overridable.
|
||||
|
||||
:Returns:
|
||||
A TreePath for the given node.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_get_value(self, node, column):
|
||||
"""Overridable.
|
||||
|
||||
:Parameters:
|
||||
node : object
|
||||
column : int
|
||||
Column index to get the value from.
|
||||
|
||||
:Returns:
|
||||
The value of the column for the given node."""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_iter_children(self, parent):
|
||||
"""Overridable.
|
||||
|
||||
:Returns:
|
||||
The first child of parent or None if parent has no children.
|
||||
If parent is None, return the first node of the model.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_iter_has_child(self, node):
|
||||
"""Overridable.
|
||||
|
||||
:Returns:
|
||||
True if the given node has children.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_iter_n_children(self, node):
|
||||
"""Overridable.
|
||||
|
||||
:Returns:
|
||||
The number of children for the given node. If node is None,
|
||||
return the number of top level nodes.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_iter_nth_child(self, parent, n):
|
||||
"""Overridable.
|
||||
|
||||
:Parameters:
|
||||
parent : object
|
||||
n : int
|
||||
Index of child within parent.
|
||||
|
||||
:Returns:
|
||||
The child for the given parent at index "n", starting at 0. If parent is None,
|
||||
return the top level node corresponding to "n".
|
||||
If "n" is larger then available nodes, return None.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_iter_parent(self, child):
|
||||
"""Overridable.
|
||||
|
||||
:Returns:
|
||||
The parent node of child or None if child is a top level node."""
|
||||
raise NotImplementedError
|
||||
|
||||
def on_ref_node(self, node):
|
||||
pass
|
||||
|
||||
def on_unref_node(self, node):
|
||||
pass
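# Hypothetical sketch of the subclassing pattern described in the class
# docstring above: a model backed by a flat list of distinct Python objects.
# The class and helper names are illustrative only.
def _example_flat_list_model(items):
    class ListModel(GenericTreeModel):
        def __init__(self, items):
            GenericTreeModel.__init__(self)
            self.items = list(items)
        def on_get_flags(self):
            return Gtk.TreeModelFlags.LIST_ONLY
        def on_get_n_columns(self):
            return 1
        def on_get_column_type(self, index):
            return str
        def on_get_iter(self, path):
            index = path.get_indices()[0]
            return self.items[index] if index < len(self.items) else None
        def on_get_path(self, node):
            return (self.items.index(node),)
        def on_get_value(self, node, column):
            return str(node)
        def on_iter_next(self, node):
            index = self.items.index(node) + 1
            return self.items[index] if index < len(self.items) else None
        def on_iter_children(self, parent):
            return self.items[0] if parent is None and self.items else None
        def on_iter_has_child(self, node):
            return False
        def on_iter_n_children(self, node):
            return len(self.items) if node is None else 0
        def on_iter_nth_child(self, parent, n):
            if parent is None and n < len(self.items):
                return self.items[n]
            return None
        def on_iter_parent(self, child):
            return None
    return ListModel(items)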
|
@ -0,0 +1,333 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Development Utilities
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program; if not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Development Utilities Common utils module."""
|
||||
|
||||
import os
|
||||
import logging
|
||||
import subprocess as _subprocess
|
||||
|
||||
|
||||
class SingletonMeta (type):
|
||||
|
||||
def __init__(cls, name, bases, dict_):
|
||||
|
||||
from weakref import WeakValueDictionary
|
||||
|
||||
super(SingletonMeta, cls).__init__(name, bases, dict_)
|
||||
|
||||
cls._singleton_instances = WeakValueDictionary()
|
||||
|
||||
def __call__(cls, *a, **kw):
|
||||
|
||||
kw_key = tuple(sorted(kw.items()))
|
||||
|
||||
try:
|
||||
obj = cls._singleton_instances[a + kw_key]
|
||||
except KeyError:
|
||||
obj = super(SingletonMeta, cls).__call__(*a, **kw)
|
||||
cls._singleton_instances[a + kw_key] = obj
|
||||
return obj
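# Hypothetical sketch of the metaclass above: identical constructor arguments
# yield the same (weakly cached) instance for as long as a reference is alive.
def _singleton_meta_example():
    class Cursor(metaclass=SingletonMeta):
        def __init__(self, name):
            self.name = name
    first = Cursor("watch")
    second = Cursor("watch")
    assert first is second
    return first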
|
||||
|
||||
|
||||
def gettext_cache():
|
||||
"""Return a callable object that operates like gettext.gettext, but is much
|
||||
faster when a string is looked up more than once. This is very useful in
|
||||
loops, where calling gettext.gettext can quickly become a major performance
|
||||
bottleneck."""
|
||||
|
||||
from gettext import gettext
|
||||
|
||||
d = {}
|
||||
|
||||
def gettext_cache_access(s):
|
||||
|
||||
if s not in d:
|
||||
d[s] = gettext(s)
|
||||
return d[s]
|
||||
|
||||
return gettext_cache_access
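# Hypothetical usage sketch: look translations up inside a loop without paying
# the gettext.gettext cost for every iteration.
def _gettext_cache_example():
    _ = gettext_cache()
    return [_("Unknown") for _unused in range(1000)]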
|
||||
|
||||
|
||||
class ClassProperty (property):
|
||||
|
||||
"Like the property class, but also invokes the getter for class access."
|
||||
|
||||
def __init__(self, fget=None, fset=None, fdel=None, doc=None):
|
||||
|
||||
property.__init__(self, fget, fset, fdel, doc)
|
||||
|
||||
self.__fget = fget
|
||||
|
||||
def __get__(self, obj, obj_class=None):
|
||||
|
||||
ret = property.__get__(self, obj, obj_class)
|
||||
if ret == self:
|
||||
return self.__fget(None)
|
||||
else:
|
||||
return ret
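# Hypothetical sketch of the property variant above: the getter also runs for
# plain class access, in which case it receives None instead of an instance.
def _class_property_example():
    class Config(object):
        @ClassProperty
        def flag(instance_or_none):
            return 42
    assert Config.flag == 42
    assert Config().flag == 42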
|
||||
|
||||
|
||||
class _XDGClass (object):
|
||||
|
||||
"""Partial implementation of the XDG Base Directory specification v0.6.
|
||||
|
||||
http://standards.freedesktop.org/basedir-spec/basedir-spec-0.6.html"""
|
||||
|
||||
def __init__(self):
|
||||
|
||||
self._add_base_dir("DATA_HOME", "~/.local/share")
|
||||
self._add_base_dir("CONFIG_HOME", "~/.config")
|
||||
self._add_base_dir("CACHE_HOME", "~/.cache")
|
||||
|
||||
def _add_base_dir(self, name, default):
|
||||
|
||||
dir = os.environ.get("XDG_%s" % (name,))
|
||||
if not dir:
|
||||
dir = os.path.expanduser(os.path.join(*default.split("/")))
|
||||
|
||||
setattr(self, name, dir)
|
||||
|
||||
|
||||
XDG = _XDGClass()
|
||||
|
||||
|
||||
class SaveWriteFile (object):
|
||||
|
||||
def __init__(self, filename, mode="wt"):
|
||||
|
||||
from tempfile import mkstemp
|
||||
|
||||
self.logger = logging.getLogger("tempfile")
|
||||
|
||||
dir = os.path.dirname(filename)
|
||||
base_name = os.path.basename(filename)
|
||||
temp_prefix = "%s-tmp" % (base_name,)
|
||||
|
||||
if dir:
|
||||
# Destination dir differs from current directory, ensure that it
|
||||
# exists:
|
||||
try:
|
||||
os.makedirs(dir)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
self.clean_stale(dir, temp_prefix)
|
||||
|
||||
fd, temp_name = mkstemp(dir=dir, prefix=temp_prefix)
|
||||
|
||||
self.target_name = filename
|
||||
self.temp_name = temp_name
|
||||
self.real_file = os.fdopen(fd, mode)
|
||||
|
||||
def __enter__(self):
|
||||
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc_args):
|
||||
|
||||
if exc_args == (None, None, None,):
|
||||
self.close()
|
||||
else:
|
||||
self.discard()
|
||||
|
||||
def __del__(self):
|
||||
|
||||
try:
|
||||
self.discard()
|
||||
except AttributeError:
|
||||
# If __init__ failed, self has no real_file attribute.
|
||||
pass
|
||||
|
||||
def __close_real(self):
|
||||
|
||||
if self.real_file:
|
||||
self.real_file.close()
|
||||
self.real_file = None
|
||||
|
||||
def clean_stale(self, dir, temp_prefix):
|
||||
|
||||
from time import time
|
||||
from glob import glob
|
||||
|
||||
now = time()
|
||||
pattern = os.path.join(dir, "%s*" % (temp_prefix,))
|
||||
|
||||
for temp_filename in glob(pattern):
|
||||
mtime = os.stat(temp_filename).st_mtime
|
||||
if now - mtime > 3600:
|
||||
self.logger.info("deleting stale temporary file %s",
|
||||
temp_filename)
|
||||
try:
|
||||
os.unlink(temp_filename)
|
||||
except EnvironmentError as exc:
|
||||
self.logger.warning("deleting stale temporary file "
|
||||
"failed: %s", exc)
|
||||
|
||||
def tell(self, *a, **kw):
|
||||
|
||||
return self.real_file.tell(*a, **kw)
|
||||
|
||||
def write(self, *a, **kw):
|
||||
|
||||
return self.real_file.write(*a, **kw)
|
||||
|
||||
def close(self):
|
||||
|
||||
self.__close_real()
|
||||
|
||||
if self.temp_name:
|
||||
try:
|
||||
os.rename(self.temp_name, self.target_name)
|
||||
except OSError as exc:
|
||||
import errno
|
||||
if exc.errno == errno.EEXIST:
|
||||
# We are probably on windows.
|
||||
os.unlink(self.target_name)
|
||||
os.rename(self.temp_name, self.target_name)
|
||||
self.temp_name = None
|
||||
|
||||
def discard(self):
|
||||
|
||||
self.__close_real()
|
||||
|
||||
if self.temp_name:
|
||||
|
||||
try:
|
||||
os.unlink(self.temp_name)
|
||||
except EnvironmentError as exc:
|
||||
self.logger.warning("deleting temporary file failed: %s", exc)
|
||||
self.temp_name = None
|
||||
|
||||
|
||||
class TeeWriteFile (object):
|
||||
|
||||
# TODO Py2.5: Add context manager methods.
|
||||
|
||||
def __init__(self, *file_objects):
|
||||
|
||||
self.files = list(file_objects)
|
||||
|
||||
def close(self):
|
||||
|
||||
for file in self.files:
|
||||
file.close()
|
||||
|
||||
def flush(self):
|
||||
|
||||
for file in self.files:
|
||||
file.flush()
|
||||
|
||||
def write(self, string):
|
||||
|
||||
for file in self.files:
|
||||
file.write(string)
|
||||
|
||||
def writelines(self, lines):
|
||||
|
||||
for file in self.files:
|
||||
file.writelines(lines)
|
||||
|
||||
|
||||
class FixedPopen (_subprocess.Popen):
|
||||
|
||||
def __init__(self, args, **kw):
|
||||
|
||||
# Unconditionally specify all descriptors as redirected, to
|
||||
# work around Python bug #1358527 (which is triggered for
|
||||
# console-less applications on Windows).
|
||||
|
||||
close = []
|
||||
|
||||
for name in ("stdin", "stdout", "stderr",):
|
||||
target = kw.get(name)
|
||||
if not target:
|
||||
kw[name] = _subprocess.PIPE
|
||||
close.append(name)
|
||||
|
||||
_subprocess.Popen.__init__(self, args, **kw)
|
||||
|
||||
for name in close:
|
||||
fp = getattr(self, name)
|
||||
fp.close()
|
||||
setattr(self, name, None)
|
||||
|
||||
|
||||
class DevhelpError (EnvironmentError):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class DevhelpUnavailableError (DevhelpError):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class DevhelpClient (object):
|
||||
|
||||
def available(self):
|
||||
|
||||
try:
|
||||
self.version()
|
||||
except DevhelpUnavailableError:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def version(self):
|
||||
|
||||
return self._invoke("--version")
|
||||
|
||||
def search(self, entry):
|
||||
|
||||
self._invoke_no_interact("-s", entry)
|
||||
|
||||
def _check_os_error(self, exc):
|
||||
|
||||
import errno
|
||||
if exc.errno == errno.ENOENT:
|
||||
raise DevhelpUnavailableError()
|
||||
|
||||
def _invoke(self, *args):
|
||||
|
||||
from subprocess import PIPE
|
||||
|
||||
try:
|
||||
proc = FixedPopen(("devhelp",) + args,
|
||||
stdout=PIPE)
|
||||
except OSError as exc:
|
||||
self._check_os_error(exc)
|
||||
raise
|
||||
|
||||
out, err = proc.communicate()
|
||||
|
||||
if proc.returncode is not None and proc.returncode != 0:
|
||||
raise DevhelpError("devhelp exited with status %i"
|
||||
% (proc.returncode,))
|
||||
return out
|
||||
|
||||
def _invoke_no_interact(self, *args):
|
||||
|
||||
from subprocess import PIPE
|
||||
|
||||
try:
|
||||
proc = FixedPopen(("devhelp",) + args)
|
||||
except OSError as exc:
|
||||
self._check_os_error(exc)
|
||||
raise
|
482
subprojects/gst-devtools/debug-viewer/GstDebugViewer/Data.py
Normal file
@ -0,0 +1,482 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer Data module."""
|
||||
|
||||
import os
|
||||
import logging
|
||||
import re
|
||||
import sys
|
||||
|
||||
# Nanosecond resolution (like Gst.SECOND)
|
||||
SECOND = 1000000000
|
||||
|
||||
|
||||
def time_args(ts):
|
||||
|
||||
secs = ts // SECOND
|
||||
|
||||
return "%i:%02i:%02i.%09i" % (secs // 60 ** 2,
|
||||
secs // 60 % 60,
|
||||
secs % 60,
|
||||
ts % SECOND,)
|
||||
|
||||
|
||||
def time_diff_args(time_diff):
|
||||
|
||||
if time_diff >= 0:
|
||||
sign = "+"
|
||||
else:
|
||||
sign = "-"
|
||||
|
||||
secs = abs(time_diff) // SECOND
|
||||
|
||||
return "%s%02i:%02i.%09i" % (sign,
|
||||
secs // 60,
|
||||
secs % 60,
|
||||
abs(time_diff) % SECOND,)
|
||||
|
||||
|
||||
def time_args_no_hours(ts):
|
||||
|
||||
secs = ts // SECOND
|
||||
|
||||
return "%02i:%02i.%09i" % (secs // 60,
|
||||
secs % 60,
|
||||
ts % SECOND,)
|
||||
|
||||
|
||||
def parse_time(st):
|
||||
"""Parse time strings that look like "0:00:00.0000000"."""
|
||||
|
||||
h, m, s = st.split(":")
|
||||
secs, subsecs = s.split(".")
|
||||
|
||||
return int((int(h) * 60 ** 2 + int(m) * 60) * SECOND) + \
|
||||
int(secs) * SECOND + int(subsecs)
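# Hypothetical round-trip check: parse_time() inverts time_args() for
# nanosecond timestamps.
def _parse_time_example():
    ts = 3723 * SECOND + 123  # 1 h 2 min 3 s plus 123 ns
    assert time_args(ts) == "1:02:03.000000123"
    assert parse_time(time_args(ts)) == ts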
|
||||
|
||||
|
||||
class DebugLevel (int):
|
||||
|
||||
__names = ["NONE", "ERROR", "WARN", "FIXME",
|
||||
"INFO", "DEBUG", "LOG", "TRACE", "MEMDUMP"]
|
||||
__instances = {}
|
||||
|
||||
def __new__(cls, level):
|
||||
|
||||
try:
|
||||
level_int = int(level)
|
||||
except (ValueError, TypeError,):
|
||||
try:
|
||||
level_int = cls.__names.index(level.upper())
|
||||
except ValueError:
|
||||
raise ValueError("no debug level named %r" % (level,))
|
||||
if level_int in cls.__instances:
|
||||
return cls.__instances[level_int]
|
||||
else:
|
||||
new_instance = int.__new__(cls, level_int)
|
||||
new_instance.name = cls.__names[level_int]
|
||||
cls.__instances[level_int] = new_instance
|
||||
return new_instance
|
||||
|
||||
def __repr__(self):
|
||||
|
||||
return "<%s %s (%i)>" % (type(self).__name__, self.__names[self], self,)
|
||||
|
||||
def higher_level(self):
|
||||
|
||||
if self == len(self.__names) - 1:
|
||||
raise ValueError("already the highest debug level")
|
||||
|
||||
return DebugLevel(self + 1)
|
||||
|
||||
def lower_level(self):
|
||||
|
||||
if self == 0:
|
||||
raise ValueError("already the lowest debug level")
|
||||
|
||||
return DebugLevel(self - 1)
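# Hypothetical sketch: DebugLevel instances are interned ints that can be
# created from either a level name or its numeric value.
def _debug_level_example():
    assert DebugLevel("DEBUG") is DebugLevel(5)
    assert DebugLevel("WARN").name == "WARN"
    assert DebugLevel("ERROR").lower_level() == DebugLevel("NONE")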
|
||||
|
||||
|
||||
debug_level_none = DebugLevel("NONE")
|
||||
debug_level_error = DebugLevel("ERROR")
|
||||
debug_level_warning = DebugLevel("WARN")
|
||||
debug_level_info = DebugLevel("INFO")
|
||||
debug_level_debug = DebugLevel("DEBUG")
|
||||
debug_level_log = DebugLevel("LOG")
|
||||
debug_level_fixme = DebugLevel("FIXME")
|
||||
debug_level_trace = DebugLevel("TRACE")
|
||||
debug_level_memdump = DebugLevel("MEMDUMP")
|
||||
debug_levels = [debug_level_none,
|
||||
debug_level_trace,
|
||||
debug_level_fixme,
|
||||
debug_level_log,
|
||||
debug_level_debug,
|
||||
debug_level_info,
|
||||
debug_level_warning,
|
||||
debug_level_error,
|
||||
debug_level_memdump]
|
||||
|
||||
# For stripping color codes:
|
||||
_escape = re.compile(b"\x1b\\[[0-9;]*m")
|
||||
|
||||
|
||||
def strip_escape(s):
|
||||
|
||||
# FIXME: This can be optimized further!
|
||||
|
||||
while b"\x1b" in s:
|
||||
s = _escape.sub(b"", s)
|
||||
return s
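# Hypothetical sketch: ANSI color escapes are removed from raw log bytes.
def _strip_escape_example():
    assert strip_escape(b"\x1b[31;1mERROR\x1b[0m conn failed") == b"ERROR conn failed"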
|
||||
|
||||
|
||||
def default_log_line_regex_():
|
||||
|
||||
# "DEBUG "
|
||||
LEVEL = "([A-Z]+)\s*"
|
||||
# "0x8165430 "
|
||||
THREAD = r"(0x[0-9a-f]+)\s+" # r"\((0x[0-9a-f]+) - "
|
||||
# "0:00:00.777913000 "
|
||||
TIME = r"(\d+:\d\d:\d\d\.\d+)\s+"
|
||||
CATEGORY = "([A-Za-z0-9_-]+)\s+" # "GST_REFCOUNTING ", "flacdec "
|
||||
# " 3089 "
|
||||
PID = r"(\d+)\s*"
|
||||
FILENAME = r"([^:]*):"
|
||||
LINE = r"(\d+):"
|
||||
FUNCTION = "(~?[A-Za-z0-9_\s\*,\(\)]*):"
|
||||
# FIXME: When non-g(st)object stuff is logged with *_OBJECT (like
|
||||
# buffers!), the address is printed *without* <> brackets!
|
||||
OBJECT = "(?:<([^>]+)>)?"
|
||||
MESSAGE = "(.+)"
|
||||
|
||||
ANSI = "(?:\x1b\\[[0-9;]*m\\s*)*\\s*"
|
||||
|
||||
# New log format:
|
||||
expressions = [TIME, ANSI, PID, ANSI, THREAD, ANSI, LEVEL, ANSI,
|
||||
CATEGORY, FILENAME, LINE, FUNCTION, ANSI,
|
||||
OBJECT, ANSI, MESSAGE]
|
||||
# Old log format:
|
||||
# expressions = [LEVEL, THREAD, TIME, CATEGORY, PID, FILENAME, LINE,
|
||||
# FUNCTION, OBJECT, MESSAGE]
|
||||
|
||||
return expressions
|
||||
|
||||
|
||||
def default_log_line_regex():
|
||||
|
||||
return re.compile("".join(default_log_line_regex_()))
|
||||
|
||||
|
||||
class Producer (object):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
self.consumers = []
|
||||
|
||||
def have_load_started(self):
|
||||
|
||||
for consumer in self.consumers:
|
||||
consumer.handle_load_started()
|
||||
|
||||
def have_load_finished(self):
|
||||
|
||||
for consumer in self.consumers:
|
||||
consumer.handle_load_finished()
|
||||
|
||||
|
||||
class SortHelper (object):
|
||||
|
||||
def __init__(self, fileobj, offsets):
|
||||
|
||||
self._gen = self.__gen(fileobj, offsets)
|
||||
next(self._gen)
|
||||
|
||||
# Override in the instance, for performance (this gets called in an
|
||||
# inner loop):
|
||||
self.find_insert_position = self._gen.send
|
||||
|
||||
@staticmethod
|
||||
def find_insert_position(insert_time_string):
|
||||
|
||||
# Stub for documentation purposes.
|
||||
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def __gen(fileobj, offsets):
|
||||
|
||||
from math import floor
|
||||
tell = fileobj.tell
|
||||
seek = fileobj.seek
|
||||
read = fileobj.read
|
||||
time_len = len(time_args(0))
|
||||
|
||||
# We remember the previous insertion point. This gives a nice speed up
|
||||
# for larger bubbles which are already sorted. TODO: In practice, log
|
||||
# lines only get out of order across threads. Need to check if it pays
|
||||
# to parse the thread here and maintain multiple insertion points for
|
||||
# heavily interleaved parts of the log.
|
||||
pos = 0
|
||||
pos_time_string = ""
|
||||
|
||||
insert_pos = None
|
||||
while True:
|
||||
insert_time_string = (yield insert_pos)
|
||||
|
||||
save_offset = tell()
|
||||
|
||||
if pos_time_string <= insert_time_string:
|
||||
lo = pos
|
||||
hi = len(offsets)
|
||||
else:
|
||||
lo = 0
|
||||
hi = pos
|
||||
|
||||
# This is a bisection search, except we don't cut the range in the
|
||||
# middle each time, but at the 90th percentile. This is because
|
||||
# logs are "mostly sorted", so the insertion point is much more
|
||||
# likely to be at the end anyways:
|
||||
while lo < hi:
|
||||
mid = int(floor(lo * 0.1 + hi * 0.9))
|
||||
seek(offsets[mid])
|
||||
mid_time_string = read(time_len)
|
||||
if insert_time_string.encode('utf8') < mid_time_string:
|
||||
hi = mid
|
||||
else:
|
||||
lo = mid + 1
|
||||
pos = lo
|
||||
# Caller will replace row at pos with the new one, so this is
|
||||
# correct:
|
||||
pos_time_string = insert_time_string
|
||||
|
||||
insert_pos = pos
|
||||
|
||||
seek(save_offset)
|
||||
|
||||
|
||||
class LineCache (Producer):
|
||||
"""
|
||||
offsets: file position for each line
|
||||
levels: the debug level for each line
|
||||
"""
|
||||
|
||||
_lines_per_iteration = 50000
|
||||
|
||||
def __init__(self, fileobj, dispatcher):
|
||||
|
||||
Producer.__init__(self)
|
||||
|
||||
self.logger = logging.getLogger("linecache")
|
||||
self.dispatcher = dispatcher
|
||||
|
||||
self.__fileobj = fileobj
|
||||
self.__fileobj.seek(0, 2)
|
||||
self.__file_size = self.__fileobj.tell()
|
||||
self.__fileobj.seek(0)
|
||||
|
||||
self.offsets = []
|
||||
self.levels = [] # FIXME
|
||||
|
||||
def start_loading(self):
|
||||
|
||||
self.logger.debug("dispatching load process")
|
||||
self.have_load_started()
|
||||
self.dispatcher(self.__process())
|
||||
|
||||
def get_progress(self):
|
||||
|
||||
return float(self.__fileobj.tell()) / self.__file_size
|
||||
|
||||
def __process(self):
|
||||
|
||||
offsets = self.offsets
|
||||
levels = self.levels
|
||||
|
||||
dict_levels = {"T": debug_level_trace, "F": debug_level_fixme,
|
||||
"L": debug_level_log, "D": debug_level_debug,
|
||||
"I": debug_level_info, "W": debug_level_warning,
|
||||
"E": debug_level_error, " ": debug_level_none,
|
||||
"M": debug_level_memdump, }
|
||||
ANSI = "(?:\x1b\\[[0-9;]*m)?"
|
||||
ANSI_PATTERN = r"\d:\d\d:\d\d\.\d+ " + ANSI + \
|
||||
r" *\d+" + ANSI + \
|
||||
r" +0x[0-9a-f]+ +" + ANSI + \
|
||||
r"([TFLDIEWM ])"
|
||||
BARE_PATTERN = ANSI_PATTERN.replace(ANSI, "")
|
||||
rexp_bare = re.compile(BARE_PATTERN)
|
||||
rexp_ansi = re.compile(ANSI_PATTERN)
|
||||
rexp = rexp_bare
|
||||
|
||||
# Moving attribute lookups out of the loop:
|
||||
readline = self.__fileobj.readline
|
||||
tell = self.__fileobj.tell
|
||||
rexp_match = rexp.match
|
||||
levels_append = levels.append
|
||||
offsets_append = offsets.append
|
||||
dict_levels_get = dict_levels.get
|
||||
|
||||
self.__fileobj.seek(0)
|
||||
limit = self._lines_per_iteration
|
||||
last_line = ""
|
||||
i = 0
|
||||
sort_helper = SortHelper(self.__fileobj, offsets)
|
||||
find_insert_position = sort_helper.find_insert_position
|
||||
while True:
|
||||
i += 1
|
||||
if i >= limit:
|
||||
i = 0
|
||||
yield True
|
||||
|
||||
offset = tell()
|
||||
line = readline().decode('utf-8', errors='replace')
|
||||
if not line:
|
||||
break
|
||||
match = rexp_match(line)
|
||||
if match is None:
|
||||
if rexp is rexp_ansi or "\x1b" not in line:
|
||||
continue
|
||||
|
||||
match = rexp_ansi.match(line)
|
||||
if match is None:
|
||||
continue
|
||||
# Switch to slower ANSI parsing:
|
||||
rexp = rexp_ansi
|
||||
rexp_match = rexp.match
|
||||
|
||||
# The timestamp is at the very beginning of the row, and can be sorted
|
||||
# by lexical comparison. That's why we don't bother parsing the
|
||||
# time to integer. We also don't have to take a substring here,
|
||||
# which would be a useless memcpy.
|
||||
if line >= last_line:
|
||||
levels_append(
|
||||
dict_levels_get(match.group(1), debug_level_none))
|
||||
offsets_append(offset)
|
||||
last_line = line
|
||||
else:
|
||||
pos = find_insert_position(line)
|
||||
levels.insert(
|
||||
pos, dict_levels_get(match.group(1), debug_level_none))
|
||||
offsets.insert(pos, offset)
|
||||
|
||||
self.have_load_finished()
|
||||
yield False
|
||||
|
||||
|
||||
class LogLine (list):
|
||||
|
||||
_line_regex = default_log_line_regex()
|
||||
|
||||
@classmethod
|
||||
def parse_full(cls, line_string):
|
||||
match = cls._line_regex.match(line_string.decode('utf8', errors='replace'))
|
||||
if match is None:
|
||||
# raise ValueError ("not a valid log line (%r)" % (line_string,))
|
||||
groups = [0, 0, 0, 0, "", "", 0, "", "", 0]
|
||||
return cls(groups)
|
||||
|
||||
line = cls(match.groups())
|
||||
# Timestamp.
|
||||
line[0] = parse_time(line[0])
|
||||
# PID.
|
||||
line[1] = int(line[1])
|
||||
# Thread.
|
||||
line[2] = int(line[2], 16)
|
||||
# Level (this is handled in LineCache).
|
||||
line[3] = 0
|
||||
# Line.
|
||||
line[6] = int(line[6])
|
||||
# Message start offset.
|
||||
line[9] = match.start(9 + 1)
|
||||
|
||||
for col_id in (4, # COL_CATEGORY
|
||||
5, # COL_FILENAME
|
||||
7, # COL_FUNCTION,
|
||||
8,): # COL_OBJECT
|
||||
line[col_id] = sys.intern(line[col_id] or "")
|
||||
|
||||
return line
|
||||
|
||||
|
||||
class LogLines (object):
|
||||
|
||||
def __init__(self, fileobj, line_cache):
|
||||
|
||||
self.__fileobj = fileobj
|
||||
self.__line_cache = line_cache
|
||||
|
||||
def __len__(self):
|
||||
|
||||
return len(self.__line_cache.offsets)
|
||||
|
||||
def __getitem__(self, line_index):
|
||||
|
||||
offset = self.__line_cache.offsets[line_index]
|
||||
self.__fileobj.seek(offset)
|
||||
line_string = self.__fileobj.readline()
|
||||
line = LogLine.parse_full(line_string)
|
||||
msg = line_string[line[-1]:]
|
||||
line[-1] = msg
|
||||
return line
|
||||
|
||||
def __iter__(self):
|
||||
|
||||
size = len(self)
|
||||
i = 0
|
||||
while i < size:
|
||||
yield self[i]
|
||||
i += 1
|
||||
|
||||
|
||||
class LogFile (Producer):
|
||||
|
||||
def __init__(self, filename, dispatcher):
|
||||
|
||||
import mmap
|
||||
|
||||
Producer.__init__(self)
|
||||
|
||||
self.logger = logging.getLogger("logfile")
|
||||
|
||||
self.path = os.path.normpath(os.path.abspath(filename))
|
||||
self.__real_fileobj = open(filename, "rb")
|
||||
self.fileobj = mmap.mmap(
|
||||
self.__real_fileobj.fileno(), 0, access=mmap.ACCESS_READ)
|
||||
self.line_cache = LineCache(self.fileobj, dispatcher)
|
||||
self.line_cache.consumers.append(self)
|
||||
|
||||
def start_loading(self):
|
||||
|
||||
self.logger.debug("starting load")
|
||||
self.line_cache.start_loading()
|
||||
|
||||
def get_load_progress(self):
|
||||
|
||||
return self.line_cache.get_progress()
|
||||
|
||||
def handle_load_started(self):
|
||||
|
||||
# Chain up to our consumers:
|
||||
self.have_load_started()
|
||||
|
||||
def handle_load_finished(self):
|
||||
self.logger.debug("finish loading")
|
||||
self.lines = LogLines(self.fileobj, self.line_cache)
|
||||
|
||||
# Chain up to our consumers:
|
||||
self.have_load_finished()
|
@ -0,0 +1,44 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer GUI module."""
|
||||
|
||||
__author__ = u"René Stadler <mail@renestadler.de>"
|
||||
__version__ = "0.1"
|
||||
|
||||
import gi
|
||||
|
||||
from GstDebugViewer.GUI.app import App
|
||||
|
||||
|
||||
def main(args):
|
||||
|
||||
app = App()
|
||||
|
||||
# TODO: Once we support more than one window, open one window for each
|
||||
# supplied filename.
|
||||
window = app.windows[0]
|
||||
if len(args) > 0:
|
||||
window.set_log_file(args[0])
|
||||
|
||||
app.run()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
main(sys.argv[1:])
|
158
subprojects/gst-devtools/debug-viewer/GstDebugViewer/GUI/app.py
Normal file
@ -0,0 +1,158 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer GUI module."""
|
||||
|
||||
import os.path
|
||||
|
||||
import gi
|
||||
gi.require_version('Gdk', '3.0')
|
||||
gi.require_version('Gtk', '3.0')
|
||||
|
||||
from gi.repository import GObject
|
||||
from gi.repository import Gdk
|
||||
from gi.repository import Gtk
|
||||
|
||||
from GstDebugViewer import Common
|
||||
from GstDebugViewer.GUI.columns import ViewColumnManager
|
||||
from GstDebugViewer.GUI.window import Window
|
||||
|
||||
|
||||
class AppStateSection (Common.GUI.StateSection):
|
||||
|
||||
_name = "state"
|
||||
|
||||
geometry = Common.GUI.StateInt4("window-geometry")
|
||||
maximized = Common.GUI.StateBool("window-maximized")
|
||||
|
||||
column_order = Common.GUI.StateItemList("column-order", ViewColumnManager)
|
||||
columns_visible = Common.GUI.StateItemList(
|
||||
"columns-visible", ViewColumnManager)
|
||||
|
||||
zoom_level = Common.GUI.StateInt("zoom-level")
|
||||
|
||||
|
||||
class AppState (Common.GUI.State):
|
||||
|
||||
def __init__(self, *a, **kw):
|
||||
|
||||
Common.GUI.State.__init__(self, *a, **kw)
|
||||
|
||||
self.add_section_class(AppStateSection)
|
||||
|
||||
|
||||
class App (object):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
self.attach()
|
||||
|
||||
def load_plugins(self):
|
||||
|
||||
from GstDebugViewer import Plugins
|
||||
|
||||
plugin_classes = list(
|
||||
Plugins.load([os.path.dirname(Plugins.__file__)]))
|
||||
self.plugins = []
|
||||
for plugin_class in plugin_classes:
|
||||
plugin = plugin_class(self)
|
||||
self.plugins.append(plugin)
|
||||
|
||||
def iter_plugin_features(self):
|
||||
|
||||
for plugin in self.plugins:
|
||||
for feature in plugin.features:
|
||||
yield feature
|
||||
|
||||
def attach(self):
|
||||
|
||||
config_home = Common.utils.XDG.CONFIG_HOME
|
||||
|
||||
state_filename = os.path.join(
|
||||
config_home, "gst-debug-viewer", "state")
|
||||
|
||||
self.state = AppState(state_filename)
|
||||
self.state_section = self.state.sections["state"]
|
||||
|
||||
self.load_plugins()
|
||||
|
||||
self.windows = []
|
||||
|
||||
# Apply custom widget styling
|
||||
# TODO: check for dark theme
|
||||
css = b"""
|
||||
@define-color normal_bg_color #FFFFFF;
|
||||
@define-color shade_bg_color shade(@normal_bg_color, 0.95);
|
||||
#log_view row:nth-child(even) {
|
||||
background-color: @normal_bg_color;
|
||||
}
|
||||
#log_view row:nth-child(odd) {
|
||||
background-color: @shade_bg_color;
|
||||
}
|
||||
#log_view row:selected {
|
||||
background-color: #4488FF;
|
||||
}
|
||||
#log_view {
|
||||
-GtkTreeView-horizontal-separator: 0;
|
||||
-GtkTreeView-vertical-separator: 1;
|
||||
outline-width: 0;
|
||||
outline-offset: 0;
|
||||
}
|
||||
"""
|
||||
|
||||
style_provider = Gtk.CssProvider()
|
||||
style_provider.load_from_data(css)
|
||||
|
||||
Gtk.StyleContext.add_provider_for_screen(
|
||||
Gdk.Screen.get_default(),
|
||||
style_provider,
|
||||
Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION
|
||||
)
|
||||
|
||||
self.open_window()
|
||||
|
||||
def detach(self):
|
||||
|
||||
# TODO: If we take over deferred saving from the inspector, specify now
|
||||
# = True here!
|
||||
self.state.save()
|
||||
|
||||
def run(self):
|
||||
|
||||
try:
|
||||
Common.Main.MainLoopWrapper(Gtk.main, Gtk.main_quit).run()
|
||||
except BaseException:
|
||||
raise
|
||||
else:
|
||||
self.detach()
|
||||
|
||||
def open_window(self):
|
||||
|
||||
self.windows.append(Window(self))
|
||||
|
||||
def close_window(self, window):
|
||||
|
||||
self.windows.remove(window)
|
||||
if not self.windows:
|
||||
# GtkTreeView takes some time to go down for large files. Let's block
|
||||
# until the window is hidden:
|
||||
GObject.idle_add(Gtk.main_quit)
|
||||
Gtk.main()
|
||||
|
||||
Gtk.main_quit()
|
@ -0,0 +1,162 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer GUI module."""
|
||||
|
||||
from gi.repository import Gtk
|
||||
from gi.repository import Gdk
|
||||
|
||||
from GstDebugViewer import Data
|
||||
|
||||
|
||||
class Color (object):
|
||||
|
||||
def __init__(self, hex_24):
|
||||
|
||||
if hex_24.startswith("#"):
|
||||
s = hex_24[1:]
|
||||
else:
|
||||
s = hex_24
|
||||
|
||||
self._fields = tuple((int(hs, 16) for hs in (s[:2], s[2:4], s[4:],)))
|
||||
|
||||
def gdk_color(self):
|
||||
|
||||
return Gdk.color_parse(self.hex_string())
|
||||
|
||||
def hex_string(self):
|
||||
|
||||
return "#%02x%02x%02x" % self._fields
|
||||
|
||||
def float_tuple(self):
|
||||
|
||||
return tuple((float(x) / 255 for x in self._fields))
|
||||
|
||||
def byte_tuple(self):
|
||||
|
||||
return self._fields
|
||||
|
||||
def short_tuple(self):
|
||||
|
||||
return tuple((x << 8 for x in self._fields))
|
||||
|
||||
|
||||
class ColorPalette (object):
|
||||
|
||||
@classmethod
|
||||
def get(cls):
|
||||
|
||||
try:
|
||||
return cls._instance
|
||||
except AttributeError:
|
||||
cls._instance = cls()
|
||||
return cls._instance
|
||||
|
||||
|
||||
class TangoPalette (ColorPalette):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
for name, r, g, b in [("black", 0, 0, 0,),
|
||||
("white", 255, 255, 255,),
|
||||
("butter1", 252, 233, 79),
|
||||
("butter2", 237, 212, 0),
|
||||
("butter3", 196, 160, 0),
|
||||
("chameleon1", 138, 226, 52),
|
||||
("chameleon2", 115, 210, 22),
|
||||
("chameleon3", 78, 154, 6),
|
||||
("orange1", 252, 175, 62),
|
||||
("orange2", 245, 121, 0),
|
||||
("orange3", 206, 92, 0),
|
||||
("skyblue1", 114, 159, 207),
|
||||
("skyblue2", 52, 101, 164),
|
||||
("skyblue3", 32, 74, 135),
|
||||
("plum1", 173, 127, 168),
|
||||
("plum2", 117, 80, 123),
|
||||
("plum3", 92, 53, 102),
|
||||
("chocolate1", 233, 185, 110),
|
||||
("chocolate2", 193, 125, 17),
|
||||
("chocolate3", 143, 89, 2),
|
||||
("scarletred1", 239, 41, 41),
|
||||
("scarletred2", 204, 0, 0),
|
||||
("scarletred3", 164, 0, 0),
|
||||
("aluminium1", 238, 238, 236),
|
||||
("aluminium2", 211, 215, 207),
|
||||
("aluminium3", 186, 189, 182),
|
||||
("aluminium4", 136, 138, 133),
|
||||
("aluminium5", 85, 87, 83),
|
||||
("aluminium6", 46, 52, 54)]:
|
||||
setattr(self, name, Color("%02x%02x%02x" % (r, g, b,)))
|
||||
|
||||
|
||||
class ColorTheme (object):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
self.colors = {}
|
||||
|
||||
def add_color(self, key, *colors):
|
||||
|
||||
self.colors[key] = colors
|
||||
|
||||
|
||||
class LevelColorTheme (ColorTheme):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class LevelColorThemeTango (LevelColorTheme):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
LevelColorTheme.__init__(self)
|
||||
|
||||
p = TangoPalette.get()
|
||||
self.add_color(Data.debug_level_none, None, None, None)
|
||||
self.add_color(Data.debug_level_trace, p.black, p.aluminium2)
|
||||
self.add_color(Data.debug_level_fixme, p.black, p.butter3)
|
||||
self.add_color(Data.debug_level_log, p.black, p.plum1)
|
||||
self.add_color(Data.debug_level_debug, p.black, p.skyblue1)
|
||||
self.add_color(Data.debug_level_info, p.black, p.chameleon1)
|
||||
self.add_color(Data.debug_level_warning, p.black, p.orange1)
|
||||
self.add_color(Data.debug_level_error, p.white, p.scarletred1)
|
||||
self.add_color(Data.debug_level_memdump, p.white, p.aluminium3)
|
||||
|
||||
|
||||
class ThreadColorTheme (ColorTheme):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class ThreadColorThemeTango (ThreadColorTheme):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
ThreadColorTheme.__init__(self)
|
||||
|
||||
t = TangoPalette.get()
|
||||
for i, color in enumerate([t.butter2,
|
||||
t.orange2,
|
||||
t.chocolate3,
|
||||
t.chameleon2,
|
||||
t.skyblue1,
|
||||
t.plum1,
|
||||
t.scarletred1,
|
||||
t.aluminium6]):
|
||||
self.add_color(i, color)
|
@ -0,0 +1,741 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer GUI module."""
|
||||
|
||||
import logging
|
||||
|
||||
from gi.repository import Gtk, GLib
|
||||
|
||||
from GstDebugViewer import Common, Data
|
||||
from GstDebugViewer.GUI.colors import LevelColorThemeTango
|
||||
from GstDebugViewer.GUI.models import LazyLogModel, LogModelBase
|
||||
|
||||
|
||||
def _(s):
|
||||
return s
|
||||
|
||||
# Sync with gst-inspector!
|
||||
|
||||
|
||||
class Column (object):
|
||||
|
||||
"""A single list view column, managed by a ColumnManager instance."""
|
||||
|
||||
name = None
|
||||
id = None
|
||||
label_header = None
|
||||
get_modify_func = None
|
||||
get_data_func = None
|
||||
get_sort_func = None
|
||||
|
||||
def __init__(self):
|
||||
|
||||
view_column = Gtk.TreeViewColumn(self.label_header)
|
||||
view_column.props.reorderable = True
|
||||
|
||||
self.view_column = view_column
|
||||
|
||||
|
||||
class SizedColumn (Column):
|
||||
|
||||
default_size = None
|
||||
|
||||
def compute_default_size(self):
|
||||
|
||||
return None
|
||||
|
||||
# Sync with gst-inspector?
|
||||
|
||||
|
||||
class TextColumn (SizedColumn):
|
||||
|
||||
font_family = None
|
||||
|
||||
def __init__(self):
|
||||
|
||||
Column.__init__(self)
|
||||
|
||||
column = self.view_column
|
||||
cell = Gtk.CellRendererText()
|
||||
column.pack_start(cell, True)
|
||||
|
||||
cell.props.yalign = 0.
|
||||
cell.props.ypad = 0
|
||||
|
||||
if self.font_family:
|
||||
cell.props.family = self.font_family
|
||||
cell.props.family_set = True
|
||||
|
||||
if self.get_data_func:
|
||||
data_func = self.get_data_func()
|
||||
assert data_func
|
||||
id_ = self.id
|
||||
if id_ is not None:
|
||||
def cell_data_func(column, cell, model, tree_iter, user_data):
|
||||
data_func(cell.props, model.get_value(tree_iter, id_))
|
||||
else:
|
||||
cell_data_func = data_func
|
||||
column.set_cell_data_func(cell, cell_data_func)
|
||||
elif not self.get_modify_func:
|
||||
column.add_attribute(cell, "text", self.id)
|
||||
else:
|
||||
self.update_modify_func(column, cell)
|
||||
|
||||
column.props.resizable = True
|
||||
|
||||
def update_modify_func(self, column, cell):
|
||||
|
||||
modify_func = self.get_modify_func()
|
||||
id_ = self.id
|
||||
|
||||
def cell_data_func(column, cell, model, tree_iter, user_data):
|
||||
cell.props.text = modify_func(model.get_value(tree_iter, id_))
|
||||
column.set_cell_data_func(cell, cell_data_func)
|
||||
|
||||
def compute_default_size(self):
|
||||
|
||||
values = self.get_values_for_size()
|
||||
if not values:
|
||||
return SizedColumn.compute_default_size(self)
|
||||
|
||||
cell = self.view_column.get_cells()[0]
|
||||
|
||||
if self.get_modify_func is not None:
|
||||
format = self.get_modify_func()
|
||||
else:
|
||||
def identity(x):
|
||||
return x
|
||||
format = identity
|
||||
max_width = 0
|
||||
for value in values:
|
||||
cell.props.text = format(value)
|
||||
x, y, w, h = self.view_column.cell_get_size()
|
||||
max_width = max(max_width, w)
|
||||
|
||||
return max_width
|
||||
|
||||
def get_values_for_size(self):
|
||||
|
||||
return ()
|
||||
|
||||
|
||||
class TimeColumn (TextColumn):
|
||||
|
||||
name = "time"
|
||||
label_header = _("Time")
|
||||
id = LazyLogModel.COL_TIME
|
||||
font_family = "monospace"
|
||||
|
||||
def __init__(self, *a, **kw):
|
||||
|
||||
self.base_time = 0
|
||||
|
||||
TextColumn.__init__(self, *a, **kw)
|
||||
|
||||
def get_modify_func(self):
|
||||
|
||||
if self.base_time:
|
||||
time_diff_args = Data.time_diff_args
|
||||
base_time = self.base_time
|
||||
|
||||
def format_time(value):
|
||||
return time_diff_args(value - base_time)
|
||||
else:
|
||||
time_args = Data.time_args
|
||||
|
||||
def format_time(value):
|
||||
# TODO: This is hard coded to omit hours.
|
||||
return time_args(value)[2:]
|
||||
|
||||
return format_time
|
||||
|
||||
def get_values_for_size(self):
|
||||
|
||||
values = [0]
|
||||
|
||||
return values
|
||||
|
||||
def set_base_time(self, base_time):
|
||||
|
||||
self.base_time = base_time
|
||||
|
||||
column = self.view_column
|
||||
cell = column.get_cells()[0]
|
||||
self.update_modify_func(column, cell)
|
||||
|
||||
|
||||
class LevelColumn (TextColumn):
|
||||
|
||||
name = "level"
|
||||
label_header = _("L")
|
||||
id = LazyLogModel.COL_LEVEL
|
||||
|
||||
def __init__(self):
|
||||
|
||||
TextColumn.__init__(self)
|
||||
|
||||
cell = self.view_column.get_cells()[0]
|
||||
cell.props.xalign = .5
|
||||
|
||||
@staticmethod
|
||||
def get_modify_func():
|
||||
|
||||
def format_level(value):
|
||||
return value.name[0]
|
||||
|
||||
return format_level
|
||||
|
||||
@staticmethod
|
||||
def get_data_func():
|
||||
|
||||
theme = LevelColorThemeTango()
|
||||
colors = dict((level, tuple((c.gdk_color()
|
||||
for c in theme.colors[level])),)
|
||||
for level in Data.debug_levels
|
||||
if level != Data.debug_level_none)
|
||||
|
||||
def level_data_func(cell_props, level):
|
||||
cell_props.text = level.name[0]
|
||||
if level in colors:
|
||||
cell_colors = colors[level]
|
||||
else:
|
||||
cell_colors = (None, None, None,)
|
||||
cell_props.foreground_gdk = cell_colors[0]
|
||||
cell_props.background_gdk = cell_colors[1]
|
||||
|
||||
return level_data_func
|
||||
|
||||
def get_values_for_size(self):
|
||||
|
||||
values = [Data.debug_level_log, Data.debug_level_debug,
|
||||
Data.debug_level_info, Data.debug_level_warning,
|
||||
Data.debug_level_error, Data.debug_level_memdump]
|
||||
|
||||
return values
|
||||
|
||||
|
||||
class PidColumn (TextColumn):
|
||||
|
||||
name = "pid"
|
||||
label_header = _("PID")
|
||||
id = LazyLogModel.COL_PID
|
||||
font_family = "monospace"
|
||||
|
||||
@staticmethod
|
||||
def get_modify_func():
|
||||
|
||||
return str
|
||||
|
||||
def get_values_for_size(self):
|
||||
|
||||
return ["999999"]
|
||||
|
||||
|
||||
class ThreadColumn (TextColumn):
|
||||
|
||||
name = "thread"
|
||||
label_header = _("Thread")
|
||||
id = LazyLogModel.COL_THREAD
|
||||
font_family = "monospace"
|
||||
|
||||
@staticmethod
|
||||
def get_modify_func():
|
||||
|
||||
def format_thread(value):
|
||||
return "0x%07x" % (value,)
|
||||
|
||||
return format_thread
|
||||
|
||||
def get_values_for_size(self):
|
||||
|
||||
return [int("ffffff", 16)]
|
||||
|
||||
|
||||
class CategoryColumn (TextColumn):
|
||||
|
||||
name = "category"
|
||||
label_header = _("Category")
|
||||
id = LazyLogModel.COL_CATEGORY
|
||||
|
||||
def get_values_for_size(self):
|
||||
|
||||
return ["GST_LONG_CATEGORY", "somelongelement"]
|
||||
|
||||
|
||||
class CodeColumn (TextColumn):
|
||||
|
||||
name = "code"
|
||||
label_header = _("Code")
|
||||
id = None
|
||||
|
||||
@staticmethod
|
||||
def get_data_func():
|
||||
|
||||
filename_id = LogModelBase.COL_FILENAME
|
||||
line_number_id = LogModelBase.COL_LINE_NUMBER
|
||||
|
||||
def filename_data_func(column, cell, model, tree_iter, user_data):
|
||||
args = model.get(tree_iter, filename_id, line_number_id)
|
||||
cell.props.text = "%s:%i" % args
|
||||
|
||||
return filename_data_func
|
||||
|
||||
def get_values_for_size(self):
|
||||
|
||||
return ["gstsomefilename.c:1234"]
|
||||
|
||||
|
||||
class FunctionColumn (TextColumn):
|
||||
|
||||
name = "function"
|
||||
label_header = _("Function")
|
||||
id = LazyLogModel.COL_FUNCTION
|
||||
|
||||
def get_values_for_size(self):
|
||||
|
||||
return ["gst_this_should_be_enough"]
|
||||
|
||||
|
||||
class ObjectColumn (TextColumn):
|
||||
|
||||
name = "object"
|
||||
label_header = _("Object")
|
||||
id = LazyLogModel.COL_OBJECT
|
||||
|
||||
def get_values_for_size(self):
|
||||
|
||||
return ["longobjectname00"]
|
||||
|
||||
|
||||
class MessageColumn (TextColumn):
|
||||
|
||||
name = "message"
|
||||
label_header = _("Message")
|
||||
id = None
|
||||
|
||||
def __init__(self, *a, **kw):
|
||||
|
||||
self.highlighters = {}
|
||||
|
||||
TextColumn.__init__(self, *a, **kw)
|
||||
|
||||
def get_data_func(self):
|
||||
|
||||
highlighters = self.highlighters
|
||||
id_ = LazyLogModel.COL_MESSAGE
|
||||
|
||||
def message_data_func(column, cell, model, tree_iter, user_data):
|
||||
|
||||
msg = model.get_value(tree_iter, id_).decode("utf8", errors="replace")
|
||||
|
||||
if not highlighters:
|
||||
cell.props.text = msg
|
||||
return
|
||||
|
||||
if len(highlighters) > 1:
|
||||
raise NotImplementedError("FIXME: Support more than one...")
|
||||
|
||||
highlighter = list(highlighters.values())[0]
|
||||
row = model[tree_iter]
|
||||
ranges = highlighter(row)
|
||||
if not ranges:
|
||||
cell.props.text = msg
|
||||
else:
|
||||
tags = []
|
||||
prev_end = 0
|
||||
end = None
|
||||
for start, end in ranges:
|
||||
if prev_end < start:
|
||||
tags.append(
|
||||
GLib.markup_escape_text(msg[prev_end:start]))
|
||||
msg_escape = GLib.markup_escape_text(msg[start:end])
|
||||
tags.append("<span foreground=\'#FFFFFF\'"
|
||||
" background=\'#0000FF\'>%s</span>" % (msg_escape,))
|
||||
prev_end = end
|
||||
if end is not None:
|
||||
tags.append(GLib.markup_escape_text(msg[end:]))
|
||||
cell.props.markup = "".join(tags)
|
||||
|
||||
return message_data_func
|
||||
|
||||
def get_values_for_size(self):
|
||||
|
||||
values = ["Just some good minimum size"]
|
||||
|
||||
return values
|
||||
|
||||
|
||||
class ColumnManager (Common.GUI.Manager):
|
||||
|
||||
column_classes = ()
|
||||
|
||||
@classmethod
|
||||
def iter_item_classes(cls):
|
||||
|
||||
return iter(cls.column_classes)
|
||||
|
||||
def __init__(self):
|
||||
|
||||
self.view = None
|
||||
self.actions = None
|
||||
self.zoom = 1.0
|
||||
self.__columns_changed_id = None
|
||||
self.columns = []
|
||||
self.column_order = list(self.column_classes)
|
||||
|
||||
self.action_group = Gtk.ActionGroup("ColumnActions")
|
||||
|
||||
def make_entry(col_class):
|
||||
return ("show-%s-column" % (col_class.name,),
|
||||
None,
|
||||
col_class.label_header,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
True,)
|
||||
|
||||
entries = [make_entry(cls) for cls in self.column_classes]
|
||||
self.action_group.add_toggle_actions(entries)
|
||||
|
||||
def iter_items(self):
|
||||
|
||||
return iter(self.columns)
|
||||
|
||||
def attach(self):
|
||||
|
||||
for col_class in self.column_classes:
|
||||
action = self.get_toggle_action(col_class)
|
||||
if action.props.active:
|
||||
self._add_column(col_class())
|
||||
action.connect("toggled",
|
||||
self.__handle_show_column_action_toggled,
|
||||
col_class.name)
|
||||
|
||||
self.__columns_changed_id = self.view.connect("columns-changed",
|
||||
self.__handle_view_columns_changed)
|
||||
|
||||
def detach(self):
|
||||
|
||||
if self.__columns_changed_id is not None:
|
||||
self.view.disconnect(self.__columns_changed_id)
|
||||
self.__columns_changed_id = None
|
||||
|
||||
def attach_sort(self):
|
||||
|
||||
sort_model = self.view.get_model()
|
||||
|
||||
# Inform the sorted tree model of any custom sorting functions.
|
||||
for col_class in self.column_classes:
|
||||
if col_class.get_sort_func:
|
||||
sort_func = col_class.get_sort_func()
|
||||
sort_model.set_sort_func(col_class.id, sort_func)
|
||||
|
||||
def enable_sort(self):
|
||||
|
||||
sort_model = self.view.get_model()
|
||||
|
||||
if sort_model:
|
||||
self.logger.debug("activating sort")
|
||||
sort_model.set_sort_column_id(*self.default_sort)
|
||||
self.default_sort = None
|
||||
else:
|
||||
self.logger.debug("not activating sort (no model set)")
|
||||
|
||||
def disable_sort(self):
|
||||
|
||||
self.logger.debug("deactivating sort")
|
||||
|
||||
sort_model = self.view.get_model()
|
||||
|
||||
self.default_sort = tree_sortable_get_sort_column_id(sort_model)
|
||||
|
||||
sort_model.set_sort_column_id(TREE_SORTABLE_UNSORTED_COLUMN_ID,
|
||||
Gtk.SortType.ASCENDING)
|
||||
|
||||
def set_zoom(self, scale):
|
||||
|
||||
for column in self.columns:
|
||||
cell = column.view_column.get_cells()[0]
|
||||
cell.props.scale = scale
|
||||
column.view_column.queue_resize()
|
||||
|
||||
self.zoom = scale
|
||||
|
||||
def set_base_time(self, base_time):
|
||||
|
||||
try:
|
||||
time_column = self.find_item(name=TimeColumn.name)
|
||||
except KeyError:
|
||||
return
|
||||
|
||||
time_column.set_base_time(base_time)
|
||||
self.size_column(time_column)
|
||||
|
||||
def get_toggle_action(self, column_class):
|
||||
|
||||
action_name = "show-%s-column" % (column_class.name,)
|
||||
return self.action_group.get_action(action_name)
|
||||
|
||||
def get_initial_column_order(self):
|
||||
|
||||
return tuple(self.column_classes)
|
||||
|
||||
def _add_column(self, column):
|
||||
|
||||
name = column.name
|
||||
pos = self.__get_column_insert_position(column)
|
||||
|
||||
if self.view.props.fixed_height_mode:
|
||||
column.view_column.props.sizing = Gtk.TreeViewColumnSizing.FIXED
|
||||
|
||||
cell = column.view_column.get_cells()[0]
|
||||
cell.props.scale = self.zoom
|
||||
|
||||
self.columns.insert(pos, column)
|
||||
self.view.insert_column(column.view_column, pos)
|
||||
|
||||
def _remove_column(self, column):
|
||||
|
||||
self.columns.remove(column)
|
||||
self.view.remove_column(column.view_column)
|
||||
|
||||
def __get_column_insert_position(self, column):
|
||||
|
||||
col_class = self.find_item_class(name=column.name)
|
||||
pos = self.column_order.index(col_class)
|
||||
before = self.column_order[:pos]
|
||||
shown_names = [col.name for col in self.columns]
|
||||
for col_class in before:
|
||||
if col_class.name not in shown_names:
|
||||
pos -= 1
|
||||
return pos
|
||||
|
||||
def __iter_next_hidden(self, column_class):
|
||||
|
||||
pos = self.column_order.index(column_class)
|
||||
rest = self.column_order[pos + 1:]
|
||||
for next_class in rest:
|
||||
try:
|
||||
self.find_item(name=next_class.name)
|
||||
except KeyError:
|
||||
# No instance -- the column is hidden.
|
||||
yield next_class
|
||||
else:
|
||||
break
|
||||
|
||||
def __handle_show_column_action_toggled(self, toggle_action, name):
|
||||
|
||||
if toggle_action.props.active:
|
||||
try:
|
||||
# This should fail.
|
||||
column = self.find_item(name=name)
|
||||
except KeyError:
|
||||
col_class = self.find_item_class(name=name)
|
||||
self._add_column(col_class())
|
||||
else:
|
||||
# Out of sync for some reason.
|
||||
return
|
||||
else:
|
||||
try:
|
||||
column = self.find_item(name=name)
|
||||
except KeyError:
|
||||
# Out of sync for some reason.
|
||||
return
|
||||
else:
|
||||
self._remove_column(column)
|
||||
|
||||
def __handle_view_columns_changed(self, element_view):
|
||||
|
||||
view_columns = element_view.get_columns()
|
||||
new_visible = [self.find_item(view_column=column)
|
||||
for column in view_columns]
|
||||
|
||||
# We only care about reordering here.
|
||||
if len(new_visible) != len(self.columns):
|
||||
return
|
||||
|
||||
if new_visible != self.columns:
|
||||
|
||||
new_order = []
|
||||
for column in new_visible:
|
||||
col_class = self.find_item_class(name=column.name)
|
||||
new_order.append(col_class)
|
||||
new_order.extend(self.__iter_next_hidden(col_class))
|
||||
|
||||
names = (column.name for column in new_visible)
|
||||
self.logger.debug("visible columns reordered: %s",
|
||||
", ".join(names))
|
||||
|
||||
self.columns[:] = new_visible
|
||||
self.column_order[:] = new_order
|
||||
|
||||
|
||||
class ViewColumnManager (ColumnManager):
|
||||
|
||||
column_classes = (
|
||||
TimeColumn, LevelColumn, PidColumn, ThreadColumn, CategoryColumn,
|
||||
CodeColumn, FunctionColumn, ObjectColumn, MessageColumn,)
|
||||
|
||||
default_column_classes = (
|
||||
TimeColumn, LevelColumn, CategoryColumn, CodeColumn,
|
||||
FunctionColumn, ObjectColumn, MessageColumn,)
|
||||
|
||||
def __init__(self, state):
|
||||
|
||||
ColumnManager.__init__(self)
|
||||
|
||||
self.logger = logging.getLogger("ui.columns")
|
||||
|
||||
self.state = state
|
||||
|
||||
def attach(self, view):
|
||||
|
||||
self.view = view
|
||||
view.connect("notify::model", self.__handle_notify_model)
|
||||
|
||||
order = self.state.column_order
|
||||
if len(order) == len(self.column_classes):
|
||||
self.column_order[:] = order
|
||||
|
||||
visible = self.state.columns_visible
|
||||
if not visible:
|
||||
visible = self.default_column_classes
|
||||
for col_class in self.column_classes:
|
||||
action = self.get_toggle_action(col_class)
|
||||
action.props.active = (col_class in visible)
|
||||
|
||||
ColumnManager.attach(self)
|
||||
|
||||
self.columns_sized = False
|
||||
|
||||
def detach(self):
|
||||
|
||||
self.state.column_order = self.column_order
|
||||
self.state.columns_visible = self.columns
|
||||
|
||||
return ColumnManager.detach(self)
|
||||
|
||||
def set_zoom(self, scale):
|
||||
|
||||
ColumnManager.set_zoom(self, scale)
|
||||
|
||||
if self.view is None:
|
||||
return
|
||||
|
||||
# Timestamp and log level columns are pretty much fixed size, so resize
|
||||
# them back to default on zoom change:
|
||||
names = (TimeColumn.name,
|
||||
LevelColumn.name,
|
||||
PidColumn.name,
|
||||
ThreadColumn.name)
|
||||
for column in self.columns:
|
||||
if column.name in names:
|
||||
self.size_column(column)
|
||||
|
||||
def size_column(self, column):
|
||||
|
||||
if column.default_size is None:
|
||||
default_size = column.compute_default_size()
|
||||
else:
|
||||
default_size = column.default_size
|
||||
# FIXME: Abstract away fixed size setting in Column class!
|
||||
if default_size is None:
|
||||
# Dummy fallback:
|
||||
column.view_column.props.fixed_width = 50
|
||||
self.logger.warning(
|
||||
"%s column does not implement default size", column.name)
|
||||
else:
|
||||
column.view_column.props.fixed_width = default_size
|
||||
|
||||
def _add_column(self, column):
|
||||
|
||||
result = ColumnManager._add_column(self, column)
|
||||
self.size_column(column)
|
||||
return result
|
||||
|
||||
def _remove_column(self, column):
|
||||
|
||||
column.default_size = column.view_column.props.fixed_width
|
||||
return ColumnManager._remove_column(self, column)
|
||||
|
||||
def __handle_notify_model(self, view, gparam):
|
||||
|
||||
if self.columns_sized:
|
||||
# Already sized.
|
||||
return
|
||||
model = self.view.get_model()
|
||||
if model is None:
|
||||
return
|
||||
self.logger.debug("model changed, sizing columns")
|
||||
for column in self.iter_items():
|
||||
self.size_column(column)
|
||||
self.columns_sized = True
|
||||
|
||||
|
||||
class WrappingMessageColumn (MessageColumn):
|
||||
|
||||
def wrap_to_width(self, width):
|
||||
|
||||
col = self.view_column
|
||||
col.props.max_width = width
|
||||
col.get_cells()[0].props.wrap_width = width
|
||||
col.queue_resize()
|
||||
|
||||
|
||||
class LineViewColumnManager (ColumnManager):
|
||||
|
||||
column_classes = (TimeColumn, WrappingMessageColumn,)
|
||||
|
||||
def __init__(self):
|
||||
|
||||
ColumnManager.__init__(self)
|
||||
|
||||
def attach(self, window):
|
||||
|
||||
self.__size_update = None
|
||||
|
||||
self.view = window.widgets.line_view
|
||||
self.view.set_size_request(0, 0)
|
||||
self.view.connect_after("size-allocate", self.__handle_size_allocate)
|
||||
ColumnManager.attach(self)
|
||||
|
||||
def __update_sizes(self):
|
||||
|
||||
view_width = self.view.get_allocation().width
|
||||
if view_width == self.__size_update:
|
||||
# Prevent endless recursion.
|
||||
return
|
||||
|
||||
self.__size_update = view_width
|
||||
|
||||
col = self.find_item(name="time")
|
||||
other_width = col.view_column.props.width
|
||||
|
||||
try:
|
||||
col = self.find_item(name="message")
|
||||
except KeyError:
|
||||
return
|
||||
|
||||
width = view_width - other_width
|
||||
col.wrap_to_width(width)
|
||||
|
||||
def __handle_size_allocate(self, self_, allocation):
|
||||
|
||||
self.__update_sizes()
|
@ -0,0 +1,114 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer GUI module."""
|
||||
|
||||
from GstDebugViewer.GUI.models import LogModelBase
|
||||
|
||||
|
||||
def get_comparison_function(all_but_this):
|
||||
|
||||
if all_but_this:
|
||||
return lambda x, y: x == y
|
||||
else:
|
||||
return lambda x, y: x != y
|
||||
|
||||
|
||||
class Filter (object):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class DebugLevelFilter (Filter):
|
||||
|
||||
only_this, all_but_this, this_and_above = range(3)
|
||||
|
||||
def __init__(self, debug_level, mode=0):
|
||||
|
||||
col_id = LogModelBase.COL_LEVEL
|
||||
if mode == self.this_and_above:
|
||||
def comparison_function(x, y):
|
||||
return x < y
|
||||
else:
|
||||
comparison_function = get_comparison_function(
|
||||
mode == self.all_but_this)
|
||||
|
||||
def filter_func(row):
|
||||
return comparison_function(row[col_id], debug_level)
|
||||
self.filter_func = filter_func
|
||||
|
||||
|
||||
class CategoryFilter (Filter):
|
||||
|
||||
def __init__(self, category, all_but_this=False):
|
||||
|
||||
col_id = LogModelBase.COL_CATEGORY
|
||||
comparison_function = get_comparison_function(all_but_this)
|
||||
|
||||
def category_filter_func(row):
|
||||
return comparison_function(row[col_id], category)
|
||||
self.filter_func = category_filter_func
|
||||
|
||||
|
||||
class ObjectFilter (Filter):
|
||||
|
||||
def __init__(self, object_, all_but_this=False):
|
||||
|
||||
col_id = LogModelBase.COL_OBJECT
|
||||
comparison_function = get_comparison_function(all_but_this)
|
||||
|
||||
def object_filter_func(row):
|
||||
return comparison_function(row[col_id], object_)
|
||||
self.filter_func = object_filter_func
|
||||
|
||||
|
||||
class FunctionFilter (Filter):
|
||||
|
||||
def __init__(self, function_, all_but_this=False):
|
||||
|
||||
col_id = LogModelBase.COL_FUNCTION
|
||||
comparison_function = get_comparison_function(all_but_this)
|
||||
|
||||
def function_filter_func(row):
|
||||
return comparison_function(row[col_id], function_)
|
||||
self.filter_func = function_filter_func
|
||||
|
||||
|
||||
class ThreadFilter (Filter):
|
||||
|
||||
def __init__(self, thread_, all_but_this=False):
|
||||
|
||||
col_id = LogModelBase.COL_THREAD
|
||||
comparison_function = get_comparison_function(all_but_this)
|
||||
|
||||
def thread_filter_func(row):
|
||||
return comparison_function(row[col_id], thread_)
|
||||
self.filter_func = thread_filter_func
|
||||
|
||||
|
||||
class FilenameFilter (Filter):
|
||||
|
||||
def __init__(self, filename, all_but_this=False):
|
||||
|
||||
col_id = LogModelBase.COL_FILENAME
|
||||
comparison_function = get_comparison_function(all_but_this)
|
||||
|
||||
def filename_filter_func(row):
|
||||
return comparison_function(row[col_id], filename)
|
||||
self.filter_func = filename_filter_func
|
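# Usage sketch (names below are illustrative, not part of this module): each
# filter builds a filter_func(row) closure, and FilteredLogModel in GUI.models
# keeps the rows for which that closure returns True.  With the default
# all_but_this=False the comparison is "!=", so e.g. CategoryFilter("GST_CAPS")
# keeps every row whose category differs from GST_CAPS, effectively hiding
# that category:
#
#     filtered = FilteredLogModel(base_model)
#     filtered.add_filter(CategoryFilter("GST_CAPS"),
#                         dispatcher=Common.Data.GSourceDispatcher())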
@ -0,0 +1,498 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer GUI module."""
|
||||
|
||||
from array import array
|
||||
from bisect import bisect_left
|
||||
import logging
|
||||
|
||||
from gi.repository import GObject
|
||||
from gi.repository import Gtk
|
||||
|
||||
from GstDebugViewer import Common, Data
|
||||
|
||||
|
||||
class LogModelBase (Common.GUI.GenericTreeModel, metaclass=Common.GUI.MetaModel):
|
||||
|
||||
columns = ("COL_TIME", GObject.TYPE_UINT64,
|
||||
"COL_PID", int,
|
||||
"COL_THREAD", GObject.TYPE_UINT64,
|
||||
"COL_LEVEL", object,
|
||||
"COL_CATEGORY", str,
|
||||
"COL_FILENAME", str,
|
||||
"COL_LINE_NUMBER", int,
|
||||
"COL_FUNCTION", str,
|
||||
"COL_OBJECT", str,
|
||||
"COL_MESSAGE", str,)
|
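# The exact mechanics live in Common.GUI.MetaModel (not part of this file), but
# judging from the usage below the metaclass presumably flattens this
# (name, type, ...) tuple into integer COL_* class attributes (COL_TIME == 0,
# COL_PID == 1, ...) plus a column_types sequence consulted by
# on_get_n_columns/on_get_column_type.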
||||
|
||||
def __init__(self):
|
||||
|
||||
Common.GUI.GenericTreeModel.__init__(self)
|
||||
|
||||
# self.props.leak_references = False
|
||||
|
||||
self.line_offsets = array("I")
|
||||
self.line_levels = [] # FIXME: Not so nice!
|
||||
self.line_cache = {}
|
||||
|
||||
def ensure_cached(self, line_offset):
|
||||
|
||||
raise NotImplementedError("derived classes must override this method")
|
||||
|
||||
def access_offset(self, offset):
|
||||
|
||||
raise NotImplementedError("derived classes must override this method")
|
||||
|
||||
def iter_rows_offset(self):
|
||||
|
||||
ensure_cached = self.ensure_cached
|
||||
line_cache = self.line_cache
|
||||
line_levels = self.line_levels
|
||||
COL_LEVEL = self.COL_LEVEL
|
||||
COL_MESSAGE = self.COL_MESSAGE
|
||||
access_offset = self.access_offset
|
||||
|
||||
for i, offset in enumerate(self.line_offsets):
|
||||
ensure_cached(offset)
|
||||
row = line_cache[offset]
|
||||
# adjust special rows
|
||||
row[COL_LEVEL] = line_levels[i]
|
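# The cached row stores the message as a byte offset relative to the start of
# its log line; materialize the text for the caller, then restore the offset
# afterwards so the cache stays compact.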
||||
msg_offset = row[COL_MESSAGE]
|
||||
row[COL_MESSAGE] = access_offset(offset + msg_offset)
|
||||
yield (row, offset,)
|
||||
row[COL_MESSAGE] = msg_offset
|
||||
|
||||
def on_get_flags(self):
|
||||
|
||||
flags = Gtk.TreeModelFlags.LIST_ONLY | Gtk.TreeModelFlags.ITERS_PERSIST
|
||||
|
||||
return flags
|
||||
|
||||
def on_get_n_columns(self):
|
||||
|
||||
return len(self.column_types)
|
||||
|
||||
def on_get_column_type(self, col_id):
|
||||
|
||||
return self.column_types[col_id]
|
||||
|
||||
def on_get_iter(self, path):
|
||||
|
||||
if not path:
|
||||
return
|
||||
|
||||
if len(path) > 1:
|
||||
# Flat model.
|
||||
return None
|
||||
|
||||
line_index = path[0]
|
||||
|
||||
if line_index > len(self.line_offsets) - 1:
|
||||
return None
|
||||
|
||||
return line_index
|
||||
|
||||
def on_get_path(self, rowref):
|
||||
|
||||
line_index = rowref
|
||||
|
||||
return (line_index,)
|
||||
|
||||
def on_get_value(self, line_index, col_id):
|
||||
|
||||
last_index = len(self.line_offsets) - 1
|
||||
|
||||
if line_index > last_index:
|
||||
return None
|
||||
|
||||
if col_id == self.COL_LEVEL:
|
||||
return self.line_levels[line_index]
|
||||
|
||||
line_offset = self.line_offsets[line_index]
|
||||
self.ensure_cached(line_offset)
|
||||
|
||||
value = self.line_cache[line_offset][col_id]
|
||||
if col_id == self.COL_MESSAGE:
|
||||
# strip whitespace + newline
|
||||
value = self.access_offset(line_offset + value).strip()
|
||||
elif col_id in (self.COL_TIME, self.COL_THREAD):
|
||||
value = GObject.Value(GObject.TYPE_UINT64, value)
|
||||
|
||||
return value
|
||||
|
||||
def get_value_range(self, col_id, start, stop):
|
||||
|
||||
if col_id != self.COL_LEVEL:
|
||||
raise NotImplementedError("XXX FIXME")
|
||||
|
||||
return self.line_levels[start:stop]
|
||||
|
||||
def on_iter_next(self, line_index):
|
||||
|
||||
last_index = len(self.line_offsets) - 1
|
||||
|
||||
if line_index >= last_index:
|
||||
return None
|
||||
else:
|
||||
return line_index + 1
|
||||
|
||||
def on_iter_children(self, parent):
|
||||
|
||||
return self.on_iter_nth_child(parent, 0)
|
||||
|
||||
def on_iter_has_child(self, rowref):
|
||||
|
||||
return False
|
||||
|
||||
def on_iter_n_children(self, rowref):
|
||||
|
||||
if rowref is not None:
|
||||
return 0
|
||||
|
||||
return len(self.line_offsets)
|
||||
|
||||
def on_iter_nth_child(self, parent, n):
|
||||
|
||||
last_index = len(self.line_offsets) - 1
|
||||
|
||||
if parent or n > last_index:
|
||||
return None
|
||||
|
||||
return n
|
||||
|
||||
def on_iter_parent(self, child):
|
||||
|
||||
return None
|
||||
|
||||
# def on_ref_node (self, rowref):
|
||||
|
||||
# pass
|
||||
|
||||
# def on_unref_node (self, rowref):
|
||||
|
||||
# pass
|
||||
|
||||
|
||||
class LazyLogModel (LogModelBase):
|
||||
|
||||
def __init__(self, log_obj=None):
|
||||
|
||||
LogModelBase.__init__(self)
|
||||
|
||||
self.__log_obj = log_obj
|
||||
|
||||
if log_obj:
|
||||
self.set_log(log_obj)
|
||||
|
||||
def set_log(self, log_obj):
|
||||
|
||||
self.__fileobj = log_obj.fileobj
|
||||
|
||||
self.line_cache.clear()
|
||||
self.line_offsets = log_obj.line_cache.offsets
|
||||
self.line_levels = log_obj.line_cache.levels
|
||||
|
||||
def access_offset(self, offset):
|
||||
|
||||
# TODO: Implement using one slice access instead of seek+readline.
|
||||
self.__fileobj.seek(offset)
|
||||
return self.__fileobj.readline()
|
||||
|
||||
def ensure_cached(self, line_offset):
|
||||
|
||||
if line_offset in self.line_cache:
|
||||
return
|
||||
|
||||
if len(self.line_cache) > 10000:
|
||||
self.line_cache.clear()
|
||||
|
||||
self.__fileobj.seek(line_offset)
|
||||
line = self.__fileobj.readline()
|
||||
|
||||
self.line_cache[line_offset] = Data.LogLine.parse_full(line)
|
||||
|
||||
|
||||
class FilteredLogModelBase (LogModelBase):
|
||||
|
||||
def __init__(self, super_model):
|
||||
|
||||
LogModelBase.__init__(self)
|
||||
|
||||
self.logger = logging.getLogger("filter-model-base")
|
||||
|
||||
self.super_model = super_model
|
||||
self.access_offset = super_model.access_offset
|
||||
self.ensure_cached = super_model.ensure_cached
|
||||
self.line_cache = super_model.line_cache
|
||||
|
||||
def line_index_to_super(self, line_index):
|
||||
|
||||
raise NotImplementedError("index conversion not supported")
|
||||
|
||||
def line_index_from_super(self, super_line_index):
|
||||
|
||||
raise NotImplementedError("index conversion not supported")
|
||||
|
||||
|
||||
class FilteredLogModel (FilteredLogModelBase):
|
||||
|
||||
def __init__(self, super_model):
|
||||
|
||||
FilteredLogModelBase.__init__(self, super_model)
|
||||
|
||||
self.logger = logging.getLogger("filtered-log-model")
|
||||
|
||||
self.filters = []
|
||||
self.reset()
|
||||
self.__active_process = None
|
||||
self.__filter_progress = 0.
|
||||
|
||||
def reset(self):
|
||||
|
||||
self.logger.debug("reset filter")
|
||||
|
||||
self.line_offsets = self.super_model.line_offsets
|
||||
self.line_levels = self.super_model.line_levels
|
||||
self.super_index = range(len(self.line_offsets))
|
||||
|
||||
del self.filters[:]
|
||||
|
||||
def __filter_process(self, filter):
|
||||
|
||||
YIELD_LIMIT = 10000
|
||||
|
||||
self.logger.debug("preparing new filter")
|
||||
new_line_offsets = array("I")
|
||||
new_line_levels = []
|
||||
new_super_index = array("I")
|
||||
level_id = self.COL_LEVEL
|
||||
func = filter.filter_func
|
||||
|
||||
def enum():
|
||||
i = 0
|
||||
for row, offset in self.iter_rows_offset():
|
||||
line_index = self.super_index[i]
|
||||
yield (line_index, row, offset,)
|
||||
i += 1
|
||||
self.logger.debug("running filter")
|
||||
progress = 0.
|
||||
progress_full = float(len(self))
|
||||
y = YIELD_LIMIT
|
||||
for i, row, offset in enum():
|
||||
if func(row):
|
||||
new_line_offsets.append(offset)
|
||||
new_line_levels.append(row[level_id])
|
||||
new_super_index.append(i)
|
||||
y -= 1
|
||||
if y == 0:
|
||||
progress += float(YIELD_LIMIT)
|
||||
self.__filter_progress = progress / progress_full
|
||||
y = YIELD_LIMIT
|
||||
yield True
|
||||
self.line_offsets = new_line_offsets
|
||||
self.line_levels = new_line_levels
|
||||
self.super_index = new_super_index
|
||||
self.logger.debug("filtering finished")
|
||||
|
||||
self.__filter_progress = 1.
|
||||
self.__handle_filter_process_finished()
|
||||
yield False
|
||||
|
||||
def add_filter(self, filter, dispatcher):
|
||||
|
||||
if self.__active_process is not None:
|
||||
raise ValueError("dispatched a filter process already")
|
||||
|
||||
self.logger.debug("adding filter")
|
||||
|
||||
self.filters.append(filter)
|
||||
|
||||
self.__dispatcher = dispatcher
|
||||
self.__active_process = self.__filter_process(filter)
|
||||
dispatcher(self.__active_process)
|
||||
|
||||
def abort_process(self):
|
||||
|
||||
if self.__active_process is None:
|
||||
raise ValueError("no filter process running")
|
||||
|
||||
self.__dispatcher.cancel()
|
||||
self.__active_process = None
|
||||
self.__dispatcher = None
|
||||
|
||||
del self.filters[-1]
|
||||
|
||||
def get_filter_progress(self):
|
||||
|
||||
if self.__active_process is None:
|
||||
raise ValueError("no filter process running")
|
||||
|
||||
return self.__filter_progress
|
||||
|
||||
def __handle_filter_process_finished(self):
|
||||
|
||||
self.__active_process = None
|
||||
self.handle_process_finished()
|
||||
|
||||
def handle_process_finished(self):
|
||||
|
||||
pass
|
||||
|
||||
def line_index_from_super(self, super_line_index):
|
||||
|
||||
return bisect_left(self.super_index, super_line_index)
|
||||
|
||||
def line_index_to_super(self, line_index):
|
||||
|
||||
return self.super_index[line_index]
|
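# Worked example: if super_index == [2, 5, 9] (only super model rows 2, 5 and
# 9 survived the filters), then line_index_to_super(1) == 5 and
# line_index_from_super(5) == bisect_left([2, 5, 9], 5) == 1.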
||||
|
||||
def set_range(self, super_start, super_stop):
|
||||
|
||||
old_super_start = self.line_index_to_super(0)
|
||||
old_super_stop = self.line_index_to_super(
|
||||
len(self.super_index) - 1) + 1
|
||||
|
||||
self.logger.debug("set range (%i, %i), current (%i, %i)",
|
||||
super_start, super_stop, old_super_start, old_super_stop)
|
||||
|
||||
if len(self.filters) == 0:
|
||||
# Identity.
|
||||
self.super_index = range(super_start, super_stop)
|
||||
self.line_offsets = SubRange(self.super_model.line_offsets,
|
||||
super_start, super_stop)
|
||||
self.line_levels = SubRange(self.super_model.line_levels,
|
||||
super_start, super_stop)
|
||||
return
|
||||
|
||||
if super_start < old_super_start:
|
||||
# TODO:
|
||||
raise NotImplementedError("Only handling further restriction of the range"
|
||||
" (start offset = %i)" % (super_start,))
|
||||
|
||||
if super_stop > old_super_stop:
|
||||
# TODO:
|
||||
raise NotImplementedError("Only handling further restriction of the range"
|
||||
" (end offset = %i)" % (super_stop,))
|
||||
|
||||
start = self.line_index_from_super(super_start)
|
||||
stop = self.line_index_from_super(super_stop)
|
||||
|
||||
self.super_index = SubRange(self.super_index, start, stop)
|
||||
self.line_offsets = SubRange(self.line_offsets, start, stop)
|
||||
self.line_levels = SubRange(self.line_levels, start, stop)
|
||||
|
||||
|
||||
class SubRange (object):
|
||||
|
||||
__slots__ = ("size", "start", "stop",)
|
||||
|
||||
def __init__(self, size, start, stop):
|
||||
|
||||
if start > stop:
|
||||
raise ValueError(
|
||||
"need start <= stop (got %r, %r)" % (start, stop,))
|
||||
|
||||
if isinstance(size, type(self)):
|
||||
# Another SubRange, don't stack:
|
||||
start += size.start
|
||||
stop += size.start
|
||||
size = size.size
|
||||
|
||||
self.size = size
|
||||
self.start = start
|
||||
self.stop = stop
|
||||
|
||||
def __getitem__(self, i):
|
||||
|
||||
if isinstance(i, slice):
|
||||
stop = i.stop
|
||||
if stop >= 0:
|
||||
stop += self.start
|
||||
else:
|
||||
stop += self.stop
|
||||
|
||||
return self.size[i.start + self.start:stop]
|
||||
else:
|
||||
return self.size[i + self.start]
|
||||
|
||||
def __len__(self):
|
||||
|
||||
return self.stop - self.start
|
||||
|
||||
def __iter__(self):
|
||||
|
||||
size = self.size
|
||||
for i in range(self.start, self.stop):
|
||||
yield size[i]
|
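# Example (based on the behaviour above and the unit tests in this commit):
# SubRange(values, 5, 15) acts like the slice values[5:15] without copying,
# and wrapping a SubRange in another SubRange collapses into a single window
# over the original sequence instead of stacking wrappers.  Note that despite
# its name, the "size" slot holds the wrapped sequence itself.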
||||
|
||||
|
||||
class LineViewLogModel (FilteredLogModelBase):
|
||||
|
||||
def __init__(self, super_model):
|
||||
|
||||
FilteredLogModelBase.__init__(self, super_model)
|
||||
|
||||
self.line_offsets = []
|
||||
self.line_levels = []
|
||||
|
||||
self.parent_indices = []
|
||||
|
||||
def reset(self):
|
||||
|
||||
del self.line_offsets[:]
|
||||
del self.line_levels[:]
|
||||
|
||||
def line_index_to_super(self, line_index):
|
||||
|
||||
return self.parent_indices[line_index]
|
||||
|
||||
def insert_line(self, position, super_line_index):
|
||||
|
||||
if position == -1:
|
||||
position = len(self.line_offsets)
|
||||
li = super_line_index
|
||||
self.line_offsets.insert(position, self.super_model.line_offsets[li])
|
||||
self.line_levels.insert(position, self.super_model.line_levels[li])
|
||||
self.parent_indices.insert(position, super_line_index)
|
||||
|
||||
path = (position,)
|
||||
tree_iter = self.get_iter(path)
|
||||
self.row_inserted(path, tree_iter)
|
||||
|
||||
def replace_line(self, line_index, super_line_index):
|
||||
|
||||
li = line_index
|
||||
self.line_offsets[li] = self.super_model.line_offsets[super_line_index]
|
||||
self.line_levels[li] = self.super_model.line_levels[super_line_index]
|
||||
self.parent_indices[li] = super_line_index
|
||||
|
||||
path = (line_index,)
|
||||
tree_iter = self.get_iter(path)
|
||||
self.row_changed(path, tree_iter)
|
||||
|
||||
def remove_line(self, line_index):
|
||||
|
||||
for l in (self.line_offsets,
|
||||
self.line_levels,
|
||||
self.parent_indices,):
|
||||
del l[line_index]
|
||||
|
||||
path = (line_index,)
|
||||
self.row_deleted(path)
|
1068
subprojects/gst-devtools/debug-viewer/GstDebugViewer/GUI/window.py
Normal file
File diff suppressed because it is too large
61
subprojects/gst-devtools/debug-viewer/GstDebugViewer/Main.py
Normal file
@ -0,0 +1,61 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer Main module."""
|
||||
|
||||
import sys
|
||||
import optparse
|
||||
from gettext import gettext as _, ngettext
|
||||
|
||||
from gi.repository import GLib
|
||||
|
||||
from GstDebugViewer import GUI
|
||||
import GstDebugViewer.Common.Main
|
||||
Common = GstDebugViewer.Common
|
||||
|
||||
GETTEXT_DOMAIN = "gst-debug-viewer"
|
||||
|
||||
|
||||
def main_version(opt, value, parser, *args, **kwargs):
|
||||
|
||||
from GstDebugViewer import version
|
||||
|
||||
print("GStreamer Debug Viewer %s" % (version,))
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
class Paths (Common.Main.PathsProgramBase):
|
||||
|
||||
program_name = "gst-debug-viewer"
|
||||
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser(
|
||||
_("%prog [OPTION...] [FILENAME]"),
|
||||
description=_("Display and analyze GStreamer debug log files"))
|
||||
parser.add_option("--version", "-v",
|
||||
action="callback",
|
||||
dest="version",
|
||||
callback=main_version,
|
||||
help=_("Display version and exit"))
|
||||
|
||||
Common.Main.main(main_function=GUI.main,
|
||||
option_parser=parser,
|
||||
gettext_domain=GETTEXT_DOMAIN,
|
||||
paths=Paths)
|
@ -0,0 +1,497 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer timeline widget plugin."""
|
||||
|
||||
import logging
|
||||
|
||||
from GstDebugViewer import Common, Data, GUI
|
||||
from GstDebugViewer.Plugins import FeatureBase, PluginBase, _N
|
||||
|
||||
from gettext import gettext as _
|
||||
from gi.repository import GObject, GLib
|
||||
from gi.repository import Gtk
|
||||
|
||||
|
||||
class SearchOperation (object):
|
||||
|
||||
def __init__(self, model, search_text, search_forward=True, start_position=None):
|
||||
|
||||
self.model = model
|
||||
if isinstance(search_text, str):
|
||||
self.search_text = search_text.encode('utf8')
|
||||
else:
|
||||
self.search_text = search_text
|
||||
self.search_forward = search_forward
|
||||
self.start_position = start_position
|
||||
|
||||
col_id = GUI.models.LogModelBase.COL_MESSAGE
|
||||
len_search_text = len(self.search_text)
|
||||
|
||||
def match_func(model_row):
|
||||
|
||||
message = model_row[col_id]
|
||||
if self.search_text in message:
|
||||
ranges = []
|
||||
start = 0
|
||||
while True:
|
||||
pos = message.find(self.search_text, start)
|
||||
if pos == -1:
|
||||
break
|
||||
ranges.append((pos, pos + len_search_text,))
|
||||
start = pos + len_search_text
|
||||
return ranges
|
||||
else:
|
||||
return ()
|
||||
|
||||
self.match_func = match_func
|
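# Worked example: with search_text b"pad" and a message of
# b"pad created, pad linked", match_func returns [(0, 3), (13, 16)] -- the
# byte ranges later used by MessageColumn's highlighter to mark the hits.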
||||
|
||||
|
||||
class SearchSentinel (object):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
self.dispatcher = Common.Data.GSourceDispatcher()
|
||||
self.cancelled = False
|
||||
|
||||
def run_for(self, operation):
|
||||
|
||||
self.dispatcher.cancel()
|
||||
self.dispatcher(self.__process(operation))
|
||||
self.cancelled = False
|
||||
|
||||
def abort(self):
|
||||
|
||||
self.dispatcher.cancel()
|
||||
self.cancelled = True
|
||||
|
||||
def __process(self, operation):
|
||||
|
||||
model = operation.model
|
||||
|
||||
if operation.start_position is not None:
|
||||
start_pos = operation.start_position
|
||||
elif operation.search_forward:
|
||||
start_pos = 0
|
||||
else:
|
||||
start_pos = len(model) - 1
|
||||
|
||||
start_iter = model.iter_nth_child(None, start_pos)
|
||||
|
||||
match_func = operation.match_func
|
||||
if operation.search_forward:
|
||||
iter_next = model.iter_next
|
||||
else:
|
||||
# FIXME: This is really ugly.
|
||||
nth_child = model.iter_nth_child
|
||||
|
||||
def iter_next_():
|
||||
for i in range(start_pos, -1, -1):
|
||||
yield nth_child(None, i)
|
||||
yield None
|
||||
it_ = iter_next_()
|
||||
|
||||
def iter_next(it):
|
||||
return it_.__next__()
|
||||
|
||||
YIELD_LIMIT = 1000
|
||||
i = YIELD_LIMIT
|
||||
tree_iter = start_iter
|
||||
while tree_iter and not self.cancelled:
|
||||
i -= 1
|
||||
if i == 0:
|
||||
yield True
|
||||
i = YIELD_LIMIT
|
||||
row = model[tree_iter]
|
||||
if match_func(row):
|
||||
self.handle_match_found(model, tree_iter)
|
||||
tree_iter = iter_next(tree_iter)
|
||||
|
||||
if not self.cancelled:
|
||||
self.handle_search_complete()
|
||||
yield False
|
||||
|
||||
def handle_match_found(self, model, tree_iter):
|
||||
|
||||
pass
|
||||
|
||||
def handle_search_complete(self):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class FindBarWidget (Gtk.HBox):
|
||||
|
||||
__status = {"no-match-found": _N("No match found"),
|
||||
"searching": _N("Searching...")}
|
||||
|
||||
def __init__(self, action_group):
|
||||
|
||||
GObject.GObject.__init__(self)
|
||||
|
||||
label = Gtk.Label(label=_("Find:"))
|
||||
self.pack_start(label, False, False, 2)
|
||||
|
||||
self.entry = Gtk.Entry()
|
||||
self.pack_start(self.entry, True, True, 0)
|
||||
|
||||
prev_action = action_group.get_action("goto-previous-search-result")
|
||||
prev_button = Gtk.Button()
|
||||
prev_button.set_related_action(prev_action)
|
||||
self.pack_start(prev_button, False, False, 0)
|
||||
|
||||
next_action = action_group.get_action("goto-next-search-result")
|
||||
next_button = Gtk.Button()
|
||||
next_button.set_related_action(next_action)
|
||||
self.pack_start(next_button, False, False, 0)
|
||||
|
||||
self.status_label = Gtk.Label()
|
||||
self.status_label.props.xalign = 0.
|
||||
self.status_label.props.use_markup = True
|
||||
self.pack_start(self.status_label, False, False, 6)
|
||||
self.__compute_status_size()
|
||||
self.status_label.connect("notify::style", self.__handle_notify_style)
|
||||
|
||||
self.show_all()
|
||||
|
||||
def __compute_status_size(self):
|
||||
|
||||
label = self.status_label
|
||||
old_markup = label.props.label
|
||||
label.set_size_request(-1, -1)
|
||||
max_width = 0
|
||||
try:
|
||||
for status in self.__status.values():
|
||||
self.__set_status(_(status))
|
||||
req = label.size_request()
|
||||
max_width = max(max_width, req.width)
|
||||
label.set_size_request(max_width, -1)
|
||||
finally:
|
||||
label.props.label = old_markup
|
||||
|
||||
def __handle_notify_style(self, *a, **kw):
|
||||
|
||||
self.__compute_status_size()
|
||||
|
||||
def __set_status(self, text):
|
||||
|
||||
markup = "<b>%s</b>" % (GLib.markup_escape_text(text),)
|
||||
|
||||
self.status_label.props.label = markup
|
||||
|
||||
def status_no_match_found(self):
|
||||
|
||||
self.__set_status(_(self.__status["no-match-found"]))
|
||||
|
||||
def status_searching(self):
|
||||
|
||||
self.__set_status(_(self.__status["searching"]))
|
||||
|
||||
def clear_status(self):
|
||||
|
||||
self.__set_status("")
|
||||
|
||||
|
||||
class FindBarFeature (FeatureBase):
|
||||
|
||||
def __init__(self, app):
|
||||
|
||||
FeatureBase.__init__(self, app)
|
||||
|
||||
self.logger = logging.getLogger("ui.findbar")
|
||||
|
||||
self.action_group = Gtk.ActionGroup("FindBarActions")
|
||||
self.action_group.add_toggle_actions([("show-find-bar",
|
||||
None,
|
||||
_("Find Bar"),
|
||||
"<Ctrl>F")])
|
||||
self.action_group.add_actions([("goto-next-search-result",
|
||||
None, _("Goto Next Match"),
|
||||
"<Ctrl>G"),
|
||||
("goto-previous-search-result",
|
||||
None, _("Goto Previous Match"),
|
||||
"<Ctrl><Shift>G")])
|
||||
|
||||
self.bar = None
|
||||
self.operation = None
|
||||
self.search_state = None
|
||||
self.next_match = None
|
||||
self.prev_match = None
|
||||
self.scroll_match = False
|
||||
|
||||
self.sentinel = SearchSentinel()
|
||||
self.sentinel.handle_match_found = self.handle_match_found
|
||||
self.sentinel.handle_search_complete = self.handle_search_complete
|
||||
|
||||
def scroll_view_to_line(self, line_index):
|
||||
|
||||
view = self.log_view
|
||||
|
||||
path = Gtk.TreePath((line_index,))
|
||||
|
||||
start_path, end_path = view.get_visible_range()
|
||||
|
||||
if path >= start_path and path <= end_path:
|
||||
self.logger.debug(
|
||||
"line index %i already visible, not scrolling", line_index)
|
||||
return
|
||||
|
||||
self.logger.debug("scrolling to line_index %i", line_index)
|
||||
view.scroll_to_cell(path, use_align=True, row_align=.5)
|
||||
|
||||
def handle_attach_window(self, window):
|
||||
|
||||
self.window = window
|
||||
|
||||
ui = window.ui_manager
|
||||
|
||||
ui.insert_action_group(self.action_group, 0)
|
||||
|
||||
self.log_view = window.log_view
|
||||
|
||||
self.merge_id = ui.new_merge_id()
|
||||
for name, action_name in [("ViewFindBar", "show-find-bar",),
|
||||
("ViewNextResult",
|
||||
"goto-next-search-result",),
|
||||
("ViewPrevResult", "goto-previous-search-result",)]:
|
||||
ui.add_ui(self.merge_id, "/menubar/ViewMenu/ViewMenuAdditions",
|
||||
name, action_name, Gtk.UIManagerItemType.MENUITEM, False)
|
||||
|
||||
box = window.widgets.vbox_view
|
||||
self.bar = FindBarWidget(self.action_group)
|
||||
box.pack_end(self.bar, False, False, 0)
|
||||
self.bar.hide()
|
||||
|
||||
action = self.action_group.get_action("show-find-bar")
|
||||
handler = self.handle_show_find_bar_action_toggled
|
||||
action.connect("toggled", handler)
|
||||
|
||||
action = self.action_group.get_action("goto-previous-search-result")
|
||||
handler = self.handle_goto_previous_search_result_action_activate
|
||||
action.props.sensitive = False
|
||||
action.connect("activate", handler)
|
||||
|
||||
action = self.action_group.get_action("goto-next-search-result")
|
||||
handler = self.handle_goto_next_search_result_action_activate
|
||||
action.props.sensitive = False
|
||||
action.connect("activate", handler)
|
||||
|
||||
self.bar.entry.connect("changed", self.handle_entry_changed)
|
||||
|
||||
def handle_detach_window(self, window):
|
||||
|
||||
self.window = None
|
||||
|
||||
window.ui_manager.remove_ui(self.merge_id)
|
||||
self.merge_id = None
|
||||
|
||||
def handle_show_find_bar_action_toggled(self, action):
|
||||
|
||||
if action.props.active:
|
||||
self.bar.show()
|
||||
self.bar.entry.grab_focus()
|
||||
self.update_search()
|
||||
else:
|
||||
try:
|
||||
column = self.window.column_manager.find_item(
|
||||
name="message")
|
||||
del column.highlighters[self]
|
||||
except KeyError:
|
||||
pass
|
||||
self.bar.clear_status()
|
||||
self.bar.hide()
|
||||
for action_name in ["goto-next-search-result",
|
||||
"goto-previous-search-result"]:
|
||||
self.action_group.get_action(
|
||||
action_name).props.sensitive = False
|
||||
|
||||
def handle_goto_previous_search_result_action_activate(self, action):
|
||||
|
||||
if self.prev_match is None:
|
||||
self.logger.warning("inconsistent action sensitivity")
|
||||
return
|
||||
|
||||
self.scroll_view_to_line(self.prev_match)
|
||||
self.prev_match = None
|
||||
|
||||
start_path = self.log_view.get_visible_range()[0]
|
||||
new_position = start_path[0] - 1
|
||||
self.start_search_operation(start_position=new_position,
|
||||
forward=False)
|
||||
|
||||
# FIXME
|
||||
|
||||
def handle_goto_next_search_result_action_activate(self, action):
|
||||
|
||||
if self.next_match is None:
|
||||
self.logger.warning("inconsistent action sensitivity")
|
||||
return
|
||||
|
||||
self.scroll_view_to_line(self.next_match)
|
||||
self.next_match = None
|
||||
|
||||
end_path = self.log_view.get_visible_range()[1]
|
||||
new_position = end_path[0] + 1
|
||||
self.start_search_operation(start_position=new_position,
|
||||
forward=True)
|
||||
# FIXME: Finish.
|
||||
|
||||
# model = self.log_view.get_model ()
|
||||
|
||||
# start_path, end_path = self.log_view.get_visible_range ()
|
||||
# start_index, end_index = start_path[0], end_path[0]
|
||||
|
||||
# for line_index in self.matches:
|
||||
# if line_index > end_index:
|
||||
# break
|
||||
# else:
|
||||
# return
|
||||
|
||||
# self.scroll_view_to_line (line_index)
|
||||
|
||||
def handle_entry_changed(self, entry):
|
||||
|
||||
self.update_search()
|
||||
|
||||
def update_search(self):
|
||||
|
||||
model = self.log_view.get_model()
|
||||
search_text = self.bar.entry.props.text
|
||||
column = self.window.column_manager.find_item(name="message")
|
||||
if search_text == "":
|
||||
self.logger.debug("search string set to '', aborting search")
|
||||
self.search_state = None
|
||||
self.next_match = None
|
||||
self.prev_match = None
|
||||
self.update_sensitivity()
|
||||
self.sentinel.abort()
|
||||
try:
|
||||
del column.highlighters[self]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
self.logger.debug("starting search for %r", search_text)
|
||||
self.next_match = None
|
||||
self.prev_match = None
|
||||
self.update_sensitivity()
|
||||
self.scroll_match = True
|
||||
|
||||
start_path = self.log_view.get_visible_range()[0]
|
||||
self.start_search_operation(
|
||||
search_text, start_position=start_path[0])
|
||||
self.bar.status_searching()
|
||||
column.highlighters[self] = self.operation.match_func
|
||||
|
||||
self.window.update_view()
|
||||
|
||||
def update_sensitivity(self):
|
||||
|
||||
for name, value in (("goto-next-search-result", self.next_match,),
|
||||
("goto-previous-search-result", self.prev_match,),):
|
||||
action = self.action_group.get_action(name)
|
||||
action.props.sensitive = (value is not None)
|
||||
|
||||
def start_search_operation(self, search_text=None, forward=True, start_position=None):
|
||||
|
||||
model = self.log_view.get_model()
|
||||
|
||||
if forward:
|
||||
self.search_state = "search-forward"
|
||||
if start_position is None:
|
||||
start_position = 0
|
||||
else:
|
||||
self.search_state = "search-backward"
|
||||
if start_position is None:
|
||||
start_position = len(model) - 1
|
||||
|
||||
if search_text is None:
|
||||
operation = self.operation
|
||||
if operation is None:
|
||||
raise ValueError(
|
||||
"search_text not given but have no previous search operation")
|
||||
search_text = operation.search_text
|
||||
|
||||
self.operation = SearchOperation(model, search_text,
|
||||
start_position=start_position,
|
||||
search_forward=forward)
|
||||
self.sentinel.run_for(self.operation)
|
||||
|
||||
def handle_match_found(self, model, tree_iter):
|
||||
|
||||
if self.search_state not in ("search-forward", "search-backward",):
|
||||
self.logger.warning(
|
||||
"inconsistent search state %r", self.search_state)
|
||||
return
|
||||
|
||||
line_index = model.get_path(tree_iter)[0]
|
||||
forward_search = (self.search_state == "search-forward")
|
||||
|
||||
if forward_search:
|
||||
self.logger.debug("forward search for %r matches line %i",
|
||||
self.operation.search_text, line_index)
|
||||
else:
|
||||
self.logger.debug("backward search for %r matches line %i",
|
||||
self.operation.search_text, line_index)
|
||||
|
||||
self.sentinel.abort()
|
||||
|
||||
if self.scroll_match:
|
||||
self.logger.debug("scrolling to matching line")
|
||||
self.scroll_view_to_line(line_index)
|
||||
# Now search for the next one:
|
||||
self.scroll_match = False
|
||||
# FIXME: Start with first line that is outside of the visible
|
||||
# range.
|
||||
self.start_search_operation(start_position=line_index + 1,
|
||||
forward=forward_search)
|
||||
else:
|
||||
if forward_search:
|
||||
self.next_match = line_index
|
||||
|
||||
self.search_state = "search-backward"
|
||||
self.start_search_operation(forward=False,
|
||||
start_position=line_index - 1)
|
||||
else:
|
||||
self.prev_match = line_index
|
||||
self.update_sensitivity()
|
||||
self.bar.clear_status()
|
||||
|
||||
def handle_search_complete(self):
|
||||
|
||||
if self.search_state == "search-forward":
|
||||
self.logger.debug("forward search for %r reached last line",
|
||||
self.operation.search_text)
|
||||
self.next_match = None
|
||||
elif self.search_state == "search-backward":
|
||||
self.logger.debug("backward search for %r reached first line",
|
||||
self.operation.search_text)
|
||||
self.prev_match = None
|
||||
else:
|
||||
self.logger.warning("inconsistent search state %r",
|
||||
self.search_state)
|
||||
return
|
||||
|
||||
self.update_sensitivity()
|
||||
if self.prev_match is None and self.next_match is None:
|
||||
self.bar.status_no_match_found()
|
||||
|
||||
|
||||
class Plugin (PluginBase):
|
||||
|
||||
features = (FindBarFeature,)
|
File diff suppressed because it is too large
@ -0,0 +1,103 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer Plugins package."""
|
||||
|
||||
__all__ = ["_", "_N", "FeatureBase", "PluginBase"]
|
||||
|
||||
import os.path
|
||||
|
||||
|
||||
def _N(s):
|
||||
return s
|
||||
|
||||
|
||||
def load(paths=()):
|
||||
|
||||
for path in paths:
|
||||
for plugin_module in _load_plugins(path):
|
||||
yield plugin_module.Plugin
|
||||
|
||||
|
||||
def _load_plugins(path):
|
||||
|
||||
import imp
|
||||
import glob
|
||||
|
||||
files = glob.glob(os.path.join(path, "*.py"))
|
||||
|
||||
for filename in files:
|
||||
|
||||
name = os.path.basename(os.path.splitext(filename)[0])
|
||||
if name == "__init__":
|
||||
continue
|
||||
fp, pathname, description = imp.find_module(name, [path])
|
||||
module = imp.load_module(name, fp, pathname, description)
|
||||
yield module
|
||||
|
||||
|
||||
class FeatureBase (object):
|
||||
|
||||
def __init__(self, app):
|
||||
|
||||
pass
|
||||
|
||||
def handle_attach_window(self, window):
|
||||
"""
|
||||
window: GstDebugViewer.GUI.window.Window
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
def handle_attach_log_file(self, window, log_file):
|
||||
"""
|
||||
window: GstDebugViewer.GUI.window.Window
|
||||
log_file: GstDebugViewer.Data.LogFile
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
def handle_detach_log_file(self, window, log_file):
|
||||
"""
|
||||
window: GstDebugViewer.GUI.window.Window
|
||||
log_file: GstDebugViewer.Data.LogFile
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
def handle_detach_window(self, window):
|
||||
"""
|
||||
window: GstDebugViewer.GUI.window.Window
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class PluginBase (object):
|
||||
"""
|
||||
All plugins must implement a class called Plugin inheriting from PluginBase.
|
||||
They should place a tuple of features they export into 'features'. Each
|
||||
feature should be a subclass of FeatureBase.
|
||||
"""
|
||||
|
||||
features = ()
|
||||
|
||||
def __init__(self, app):
|
||||
|
||||
pass
|
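# A minimal plugin sketch (hypothetical names, for illustration only):
#
#     class ExampleFeature (FeatureBase):
#
#         def handle_attach_window(self, window):
#             # react to the window being attached here
#             pass
#
#     class Plugin (PluginBase):
#
#         features = (ExampleFeature,)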
@ -0,0 +1,29 @@
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer package."""
|
||||
|
||||
version = "@VERSION@"
|
||||
|
||||
if version.startswith('@'):
|
||||
version = 'master'
|
||||
|
||||
__version__ = version
|
||||
|
||||
from GstDebugViewer.Main import Paths, GETTEXT_DOMAIN, main as run # noqa
|
@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
|
||||
def line_string(ts, pid, thread, level, category, filename, line, function,
|
||||
object_, message):
|
||||
|
||||
# Replicates gstreamer/gst/gstinfo.c:gst_debug_log_default.
|
||||
|
||||
# FIXME: Regarding object_, this doesn't fully replicate the formatting!
|
||||
return "%s %5d 0x%x %s %20s %s:%d:%s:<%s> %s" % (Data.time_args(ts), pid, thread,
|
||||
level.name.ljust(
|
||||
5), category,
|
||||
filename, line, function,
|
||||
object_, message,)
|
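# Example output with the defaults below (column padding approximate, and
# assuming Data.time_args() renders timestamps as 0:00:00.000000000, the same
# layout the dummy lines in test_models.py use):
#
#   0:00:00.000000000 12345 0x89abcdef LOG              GST_DUMMY gstdummyfilename.c:1:gst_dummy_function:<dummyobj0> dummy message with no content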
||||
|
||||
|
||||
def main():
|
||||
|
||||
import sys
|
||||
import os.path
|
||||
sys.path.append(os.path.dirname(os.path.dirname(sys.argv[0])))
|
||||
|
||||
global Data
|
||||
from GstDebugViewer import Data
|
||||
|
||||
count = 100000
|
||||
|
||||
ts = 0
|
||||
pid = 12345
|
||||
thread = int("89abcdef", 16)
|
||||
level = Data.debug_level_log
|
||||
category = "GST_DUMMY"
|
||||
filename = "gstdummyfilename.c"
|
||||
file_line = 1
|
||||
function = "gst_dummy_function"
|
||||
object_ = "dummyobj0"
|
||||
message = "dummy message with no content"
|
||||
|
||||
levels = (Data.debug_level_log,
|
||||
Data.debug_level_debug,
|
||||
Data.debug_level_info,)
|
||||
|
||||
shift = 0
|
||||
for i in range(count):
|
||||
|
||||
ts = i * 10000
|
||||
shift += i % (count // 100)
|
||||
level = levels[(i + shift) % 3]
|
||||
print(line_string(ts, pid, thread, level, category, filename, file_line,
|
||||
function, object_, message))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
79
subprojects/gst-devtools/debug-viewer/GstDebugViewer/tests/performance.py
Executable file
@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer performance test program."""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import os.path
|
||||
from glob import glob
|
||||
import time
|
||||
|
||||
import gi
|
||||
|
||||
from gi.repository import GObject
|
||||
|
||||
from .. import Common, Data, GUI
|
||||
|
||||
|
||||
class TestParsingPerformance (object):
|
||||
|
||||
def __init__(self, filename):
|
||||
|
||||
self.main_loop = GObject.MainLoop()
|
||||
self.log_file = Data.LogFile(filename, Common.Data.DefaultDispatcher())
|
||||
self.log_file.consumers.append(self)
|
||||
|
||||
def start(self):
|
||||
|
||||
self.log_file.start_loading()
|
||||
|
||||
def handle_load_started(self):
|
||||
|
||||
self.start_time = time.time()
|
||||
|
||||
def handle_load_finished(self):
|
||||
|
||||
diff = time.time() - self.start_time
|
||||
print("line cache built in %0.1f ms" % (diff * 1000.,))
|
||||
|
||||
start_time = time.time()
|
||||
model = GUI.LazyLogModel(self.log_file)
|
||||
for row in model:
|
||||
pass
|
||||
diff = time.time() - start_time
|
||||
print("model iterated in %0.1f ms" % (diff * 1000.,))
|
||||
print("overall time spent: %0.1f s" % (time.time() - self.start_time,))
|
||||
|
||||
import resource
|
||||
rusage = resource.getrusage(resource.RUSAGE_SELF)
|
||||
print("time spent in user mode: %.2f s" % (rusage.ru_utime,))
|
||||
print("time spent in system mode: %.2f s" % (rusage.ru_stime,))
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
if len(sys.argv) > 1:
|
||||
test = TestParsingPerformance(sys.argv[1])
|
||||
test.start()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
281
subprojects/gst-devtools/debug-viewer/GstDebugViewer/tests/test_models.py
Executable file
@ -0,0 +1,281 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer test suite for the custom tree models."""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import os.path
|
||||
from glob import glob
|
||||
|
||||
from unittest import TestCase, main as test_main
|
||||
|
||||
from .. import Common, Data
|
||||
from .. GUI.filters import CategoryFilter, Filter
|
||||
from .. GUI.models import (FilteredLogModel,
|
||||
LogModelBase,
|
||||
SubRange,)
|
||||
|
||||
|
||||
class TestSubRange (TestCase):
|
||||
|
||||
def test_len(self):
|
||||
|
||||
values = list(range(20))
|
||||
|
||||
sr = SubRange(values, 0, 20)
|
||||
self.assertEqual(len(sr), 20)
|
||||
|
||||
sr = SubRange(values, 10, 20)
|
||||
self.assertEqual(len(sr), 10)
|
||||
|
||||
sr = SubRange(values, 0, 10)
|
||||
self.assertEqual(len(sr), 10)
|
||||
|
||||
sr = SubRange(values, 5, 15)
|
||||
self.assertEqual(len(sr), 10)
|
||||
|
||||
def test_iter(self):
|
||||
|
||||
values = list(range(20))
|
||||
|
||||
sr = SubRange(values, 0, 20)
|
||||
self.assertEqual(list(sr), values)
|
||||
|
||||
sr = SubRange(values, 10, 20)
|
||||
self.assertEqual(list(sr), list(range(10, 20)))
|
||||
|
||||
sr = SubRange(values, 0, 10)
|
||||
self.assertEqual(list(sr), list(range(0, 10)))
|
||||
|
||||
sr = SubRange(values, 5, 15)
|
||||
self.assertEqual(list(sr), list(range(5, 15)))
|
||||
|
||||
|
||||
class Model (LogModelBase):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
LogModelBase.__init__(self)
|
||||
|
||||
for i in range(20):
|
||||
self.line_offsets.append(i * 100)
|
||||
self.line_levels.append(Data.debug_level_debug)
|
||||
|
||||
def ensure_cached(self, line_offset):
|
||||
|
||||
pid = line_offset // 100
|
||||
if pid % 2 == 0:
|
||||
category = b"EVEN"
|
||||
else:
|
||||
category = b"ODD"
|
||||
|
||||
line_fmt = (b"0:00:00.000000000 %5i 0x0000000 DEBUG "
|
||||
b"%20s dummy.c:1:dummy: dummy")
|
||||
line_str = line_fmt % (pid, category,)
|
||||
log_line = Data.LogLine.parse_full(line_str)
|
||||
self.line_cache[line_offset] = log_line
|
||||
|
||||
def access_offset(self, line_offset):
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
class IdentityFilter (Filter):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
def filter_func(row):
|
||||
return True
|
||||
self.filter_func = filter_func
|
||||
|
||||
|
||||
class RandomFilter (Filter):
|
||||
|
||||
def __init__(self, seed):
|
||||
|
||||
import random
|
||||
rand = random.Random()
|
||||
rand.seed(seed)
|
||||
|
||||
def filter_func(row):
|
||||
return rand.choice((True, False,))
|
||||
self.filter_func = filter_func
|
||||
|
||||
|
||||
class TestDynamicFilter (TestCase):
|
||||
|
||||
def test_unset_filter_rerange(self):
|
||||
|
||||
full_model = Model()
|
||||
filtered_model = FilteredLogModel(full_model)
|
||||
row_list = self.__row_list
|
||||
|
||||
self.assertEqual(row_list(full_model), list(range(20)))
|
||||
self.assertEqual(row_list(filtered_model), list(range(20)))
|
||||
|
||||
filtered_model.set_range(5, 16)
|
||||
|
||||
self.assertEqual(row_list(filtered_model), list(range(5, 16)))
|
||||
|
||||
def test_identity_filter_rerange(self):
|
||||
|
||||
full_model = Model()
|
||||
filtered_model = FilteredLogModel(full_model)
|
||||
row_list = self.__row_list
|
||||
|
||||
self.assertEqual(row_list(full_model), list(range(20)))
|
||||
self.assertEqual(row_list(filtered_model), list(range(20)))
|
||||
|
||||
filtered_model.add_filter(IdentityFilter(),
|
||||
Common.Data.DefaultDispatcher())
|
||||
filtered_model.set_range(5, 16)
|
||||
|
||||
self.assertEqual(row_list(filtered_model), list(range(5, 16)))
|
||||
|
||||
def test_filtered_range_refilter_skip(self):
|
||||
|
||||
full_model = Model()
|
||||
filtered_model = FilteredLogModel(full_model)
|
||||
|
||||
row_list = self.__row_list
|
||||
|
||||
filtered_model.add_filter(CategoryFilter("EVEN"),
|
||||
Common.Data.DefaultDispatcher())
|
||||
self.__dump_model(filtered_model, "filtered")
|
||||
|
||||
self.assertEqual(row_list(filtered_model), list(range(1, 20, 2)))
|
||||
self.assertEqual([filtered_model.line_index_from_super(i)
|
||||
for i in range(1, 20, 2)],
|
||||
list(range(10)))
|
||||
self.assertEqual([filtered_model.line_index_to_super(i)
|
||||
for i in range(10)],
|
||||
list(range(1, 20, 2)))
|
||||
|
||||
filtered_model.set_range(1, 20)
|
||||
self.__dump_model(filtered_model, "ranged (1, 20)")
|
||||
self.__dump_model(filtered_model, "filtered range")
|
||||
|
||||
self.assertEqual([filtered_model.line_index_from_super(i)
|
||||
for i in range(0, 19, 2)],
|
||||
list(range(10)))
|
||||
self.assertEqual([filtered_model.line_index_to_super(i)
|
||||
for i in range(10)],
|
||||
list(range(1, 20, 2)))
|
||||
|
||||
filtered_model.set_range(2, 20)
|
||||
self.__dump_model(filtered_model, "ranged (2, 20)")
|
||||
|
||||
self.assertEqual(row_list(filtered_model), list(range(3, 20, 2)))
|
||||
|
||||
def test_filtered_range_refilter(self):
|
||||
|
||||
full_model = Model()
|
||||
filtered_model = FilteredLogModel(full_model)
|
||||
|
||||
row_list = self.__row_list
|
||||
rows = row_list(full_model)
|
||||
rows_filtered = row_list(filtered_model)
|
||||
|
||||
self.__dump_model(full_model, "full model")
|
||||
|
||||
self.assertEqual(rows, rows_filtered)
|
||||
|
||||
self.assertEqual([filtered_model.line_index_from_super(i)
|
||||
for i in range(20)],
|
||||
list(range(20)))
|
||||
self.assertEqual([filtered_model.line_index_to_super(i)
|
||||
for i in range(20)],
|
||||
list(range(20)))
|
||||
|
||||
filtered_model.set_range(5, 16)
|
||||
self.__dump_model(filtered_model, "ranged model (5, 16)")
|
||||
|
||||
rows_ranged = row_list(filtered_model)
|
||||
self.assertEqual(rows_ranged, list(range(5, 16)))
|
||||
|
||||
self.__dump_model(filtered_model, "filtered model (nofilter, 5, 15)")
|
||||
|
||||
filtered_model.add_filter(CategoryFilter("EVEN"),
|
||||
Common.Data.DefaultDispatcher())
|
||||
rows_filtered = row_list(filtered_model)
|
||||
self.assertEqual(rows_filtered, list(range(5, 16, 2)))
|
||||
|
||||
self.__dump_model(filtered_model, "filtered model")
|
||||
|
||||
def test_random_filtered_range_refilter(self):
|
||||
|
||||
full_model = Model()
|
||||
filtered_model = FilteredLogModel(full_model)
|
||||
row_list = self.__row_list
|
||||
|
||||
self.assertEqual(row_list(full_model), list(range(20)))
|
||||
self.assertEqual(row_list(filtered_model), list(range(20)))
|
||||
|
||||
filtered_model.add_filter(RandomFilter(538295943),
|
||||
Common.Data.DefaultDispatcher())
|
||||
random_rows = row_list(filtered_model)
|
||||
|
||||
self.__dump_model(filtered_model)
|
||||
|
||||
filtered_model = FilteredLogModel(full_model)
|
||||
filtered_model.add_filter(RandomFilter(538295943),
|
||||
Common.Data.DefaultDispatcher())
|
||||
self.__dump_model(filtered_model, "filtered model")
|
||||
self.assertEqual(row_list(filtered_model), random_rows)
|
||||
|
||||
filtered_model.set_range(1, 10)
|
||||
self.__dump_model(filtered_model)
|
||||
self.assertEqual(row_list(filtered_model), [
|
||||
x for x in range(0, 10) if x in random_rows])
|
||||
|
||||
def __row_list(self, model):
|
||||
|
||||
return [row[Model.COL_PID] for row in model]
|
||||
|
||||
def __dump_model(self, model, comment=None):
|
||||
|
||||
# TODO: Provide a command line option to turn this on and off.
|
||||
|
||||
return
|
||||
|
||||
if not hasattr(model, "super_model"):
|
||||
# Top model.
|
||||
print("\t(%s)" % ("|".join([str(i).rjust(2)
|
||||
for i in self.__row_list(model)]),), end=' ')
|
||||
else:
|
||||
top_model = model.super_model
|
||||
if hasattr(top_model, "super_model"):
|
||||
top_model = top_model.super_model
|
||||
top_indices = self.__row_list(top_model)
|
||||
positions = self.__row_list(model)
|
||||
output = [" "] * len(top_indices)
|
||||
for i, position in enumerate(positions):
|
||||
output[position] = str(i).rjust(2)
|
||||
print("\t(%s)" % ("|".join(output),), end=' ')
|
||||
|
||||
if comment is None:
|
||||
print()
|
||||
else:
|
||||
print(comment)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_main()
|
9
subprojects/gst-devtools/debug-viewer/MANIFEST.in
Normal file
@ -0,0 +1,9 @@
recursive-include GstDebugViewer *.py
recursive-include data *.glade *.ui *.svg *.png
recursive-include po *.po
recursive-include tests *.py
include gst-debug-viewer
include gst-debug-viewer.desktop.in
include org.freedesktop.GstDebugViewer.appdata.xml.in
include AUTHORS COPYING ChangeLog MANIFEST.in NEWS README TODO
include po/POTFILES.in
36
subprojects/gst-devtools/debug-viewer/README
Normal file
@ -0,0 +1,36 @@
# how to build #

./setup.py build; sudo ./setup.py install --prefix=/usr
sudo chmod a+r /usr/share/gst-debug-viewer/*.ui

# porting issues #

http://stackoverflow.com/questions/11025700/generictreemodel-with-pygobject-introspection-gtk-3

# tips #

OLD: prev_action.connect_proxy(prev_button)
NEW: prev_button.set_related_action (prev_action)

OLD: box.pack_start (widget)
NEW: box.pack_start (widget, True, True, 0)

OLD: column.pack_start (cell)
NEW: column.pack_start (cell, True)

OLD: view_column.get_cell_renderers ()
NEW: column.get_cells ()

# porting resources #
https://www.xpra.org/trac/ticket/90?cversion=0&cnum_hist=3
https://mail.gnome.org/archives/commits-list/2013-October/msg05205.html

# profiling #
python -m profile -o output.pstats path/to/your/script arg1 arg2
gprof2dot.py -f pstats output.pstats | dot -Tpng -o output.png
~/projects/tools/gprof2dot/gprof2dot.py -f pstats output.pstats | dot -Tpng -o output.png
eog output.png

python -m cProfile -o output.pstats2 ./gst-debug-viewer debug.noansi.log
~/projects/tools/gprof2dot/gprof2dot.py -f pstats output.pstats2 | dot -Tpng -o output2.png
eog output2.png
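The OLD/NEW pairs in the tips above correspond to the PyGObject/GTK 3 API; a minimal sketch of the new-style calls, with purely illustrative widget and action names:

    import gi
    gi.require_version("Gtk", "3.0")
    from gi.repository import Gtk

    box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
    prev_button = Gtk.Button(label="Previous")
    box.pack_start(prev_button, True, True, 0)   # expand, fill and padding are now explicit

    column = Gtk.TreeViewColumn("Level")
    cell = Gtk.CellRendererText()
    column.pack_start(cell, True)                # the expand argument is now required
    renderers = column.get_cells()               # replaces view_column.get_cell_renderers()

    prev_action = Gtk.Action.new("prev", "Previous", None, None)
    prev_button.set_related_action(prev_action)  # replaces prev_action.connect_proxy(prev_button)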
701
subprojects/gst-devtools/debug-viewer/data/about-dialog.ui
Normal file
@ -0,0 +1,701 @@
|
||||
<?xml version="1.0"?>
|
||||
<interface>
|
||||
<!-- interface-requires gtk+ 2.12 -->
|
||||
<!-- interface-naming-policy toplevel-contextual -->
|
||||
<object class="GtkAboutDialog" id="about_dialog">
|
||||
<property name="visible">True</property>
|
||||
<property name="border_width">5</property>
|
||||
<property name="type_hint">dialog</property>
|
||||
<property name="copyright" translatable="yes">Copyright © 2007-2009 René Stadler</property>
|
||||
<property name="comments" translatable="yes">View and analyze GStreamer debug files</property>
|
||||
<property name="license"> GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.</property>
|
||||
<property name="authors">René Stadler <mail@renestadler.de></property>
|
||||
<property name="translator_credits" translatable="yes" comments="TRANSLATORS: Replace this string with your names, one name per line.">translator-credits</property>
|
||||
<property name="logo">gst-debug-viewer.png</property>
|
||||
<child internal-child="vbox">
|
||||
<object class="GtkVBox" id="dialog-vbox1">
|
||||
<child internal-child="action_area">
|
||||
<object class="GtkHButtonBox" id="dialog-action_area1"/>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
BIN
subprojects/gst-devtools/debug-viewer/data/gst-debug-viewer.png
Normal file
Binary file not shown.
Size: 2.0 KiB
399
subprojects/gst-devtools/debug-viewer/data/gst-debug-viewer.svg
Normal file
@ -0,0 +1,399 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Generator: Adobe Illustrator 12.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 51448) -->
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://web.resource.org/cc/"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
version="1.0"
|
||||
id="Layer_1"
|
||||
width="48"
|
||||
height="48"
|
||||
viewBox="0 0 280.22 69.387"
|
||||
overflow="visible"
|
||||
enable-background="new 0 0 280.22 69.387"
|
||||
xml:space="preserve"
|
||||
sodipodi:version="0.32"
|
||||
inkscape:version="0.45"
|
||||
sodipodi:docname="gst-inspector.svg"
|
||||
sodipodi:docbase="/home/cymacs/Desktop"
|
||||
inkscape:output_extension="org.inkscape.output.svg.inkscape"
|
||||
inkscape:export-filename="/home/cymacs/Desktop/gst-inspector.png"
|
||||
inkscape:export-xdpi="120"
|
||||
inkscape:export-ydpi="120"
|
||||
sodipodi:modified="TRUE"><metadata
|
||||
id="metadata30"><rdf:RDF><cc:Work
|
||||
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
|
||||
id="defs28"><linearGradient
|
||||
id="linearGradient2846"><stop
|
||||
id="stop2848"
|
||||
offset="0.0000000"
|
||||
style="stop-color:#8a8a8a;stop-opacity:1.0000000;" /><stop
|
||||
id="stop2850"
|
||||
offset="1.0000000"
|
||||
style="stop-color:#484848;stop-opacity:1.0000000;" /></linearGradient><linearGradient
|
||||
id="linearGradient2366"><stop
|
||||
id="stop2368"
|
||||
offset="0"
|
||||
style="stop-color:#ffffff;stop-opacity:1;" /><stop
|
||||
style="stop-color:#ffffff;stop-opacity:0.21904762;"
|
||||
offset="0.50000000"
|
||||
id="stop2374" /><stop
|
||||
id="stop2370"
|
||||
offset="1.0000000"
|
||||
style="stop-color:#ffffff;stop-opacity:1.0000000;" /></linearGradient><linearGradient
|
||||
inkscape:collect="always"
|
||||
id="linearGradient4477"><stop
|
||||
style="stop-color:#000000;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop4479" /><stop
|
||||
style="stop-color:#000000;stop-opacity:0;"
|
||||
offset="1"
|
||||
id="stop4481" /></linearGradient><linearGradient
|
||||
id="linearGradient4467"><stop
|
||||
style="stop-color:#ffffff;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop4469" /><stop
|
||||
style="stop-color:#ffffff;stop-opacity:0.24761905;"
|
||||
offset="1.0000000"
|
||||
id="stop4471" /></linearGradient><linearGradient
|
||||
id="linearGradient4454"><stop
|
||||
style="stop-color:#729fcf;stop-opacity:0.20784314;"
|
||||
offset="0.0000000"
|
||||
id="stop4456" /><stop
|
||||
style="stop-color:#729fcf;stop-opacity:0.67619050;"
|
||||
offset="1.0000000"
|
||||
id="stop4458" /></linearGradient><linearGradient
|
||||
id="linearGradient4440"><stop
|
||||
style="stop-color:#7d7d7d;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop4442" /><stop
|
||||
id="stop4448"
|
||||
offset="0.50000000"
|
||||
style="stop-color:#b1b1b1;stop-opacity:1.0000000;" /><stop
|
||||
style="stop-color:#686868;stop-opacity:1.0000000;"
|
||||
offset="1.0000000"
|
||||
id="stop4444" /></linearGradient><radialGradient
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="matrix(1,0,0,0.284916,0,30.08928)"
|
||||
r="15.821514"
|
||||
fy="42.07798"
|
||||
fx="24.306795"
|
||||
cy="42.07798"
|
||||
cx="24.306795"
|
||||
id="radialGradient4548"
|
||||
xlink:href="#linearGradient4542"
|
||||
inkscape:collect="always" /><linearGradient
|
||||
id="linearGradient259"><stop
|
||||
style="stop-color:#fafafa;stop-opacity:1.0000000;"
|
||||
offset="0.0000000"
|
||||
id="stop260" /><stop
|
||||
style="stop-color:#bbbbbb;stop-opacity:1.0000000;"
|
||||
offset="1.0000000"
|
||||
id="stop261" /></linearGradient><linearGradient
|
||||
id="linearGradient269"><stop
|
||||
style="stop-color:#a3a3a3;stop-opacity:1.0000000;"
|
||||
offset="0.0000000"
|
||||
id="stop270" /><stop
|
||||
style="stop-color:#4c4c4c;stop-opacity:1.0000000;"
|
||||
offset="1.0000000"
|
||||
id="stop271" /></linearGradient><radialGradient
|
||||
gradientUnits="userSpaceOnUse"
|
||||
fy="114.5684"
|
||||
fx="20.892099"
|
||||
r="5.256"
|
||||
cy="114.5684"
|
||||
cx="20.892099"
|
||||
id="aigrd2"><stop
|
||||
id="stop15566"
|
||||
style="stop-color:#F0F0F0"
|
||||
offset="0" /><stop
|
||||
id="stop15568"
|
||||
style="stop-color:#9a9a9a;stop-opacity:1.0000000;"
|
||||
offset="1.0000000" /></radialGradient><radialGradient
|
||||
gradientUnits="userSpaceOnUse"
|
||||
fy="64.567902"
|
||||
fx="20.892099"
|
||||
r="5.257"
|
||||
cy="64.567902"
|
||||
cx="20.892099"
|
||||
id="aigrd3"><stop
|
||||
id="stop15573"
|
||||
style="stop-color:#F0F0F0"
|
||||
offset="0" /><stop
|
||||
id="stop15575"
|
||||
style="stop-color:#9a9a9a;stop-opacity:1.0000000;"
|
||||
offset="1.0000000" /></radialGradient><linearGradient
|
||||
id="linearGradient15662"><stop
|
||||
style="stop-color:#ffffff;stop-opacity:1.0000000;"
|
||||
offset="0.0000000"
|
||||
id="stop15664" /><stop
|
||||
style="stop-color:#f8f8f8;stop-opacity:1.0000000;"
|
||||
offset="1.0000000"
|
||||
id="stop15666" /></linearGradient><linearGradient
|
||||
id="linearGradient4542"
|
||||
inkscape:collect="always"><stop
|
||||
id="stop4544"
|
||||
offset="0"
|
||||
style="stop-color:#000000;stop-opacity:1;" /><stop
|
||||
id="stop4546"
|
||||
offset="1"
|
||||
style="stop-color:#000000;stop-opacity:0;" /></linearGradient><linearGradient
|
||||
id="linearGradient5048"><stop
|
||||
id="stop5050"
|
||||
offset="0"
|
||||
style="stop-color:black;stop-opacity:0;" /><stop
|
||||
style="stop-color:black;stop-opacity:1;"
|
||||
offset="0.5"
|
||||
id="stop5056" /><stop
|
||||
id="stop5052"
|
||||
offset="1"
|
||||
style="stop-color:black;stop-opacity:0;" /></linearGradient><linearGradient
|
||||
id="linearGradient5060"
|
||||
inkscape:collect="always"><stop
|
||||
id="stop5062"
|
||||
offset="0"
|
||||
style="stop-color:black;stop-opacity:1;" /><stop
|
||||
id="stop5064"
|
||||
offset="1"
|
||||
style="stop-color:black;stop-opacity:0;" /></linearGradient><linearGradient
|
||||
id="linearGradient3449"><stop
|
||||
id="stop3451"
|
||||
offset="0.0000000"
|
||||
style="stop-color:#8a8a8a;stop-opacity:1.0000000;" /><stop
|
||||
id="stop3453"
|
||||
offset="1.0000000"
|
||||
style="stop-color:#484848;stop-opacity:1.0000000;" /></linearGradient><linearGradient
|
||||
id="linearGradient3441"><stop
|
||||
id="stop3443"
|
||||
offset="0"
|
||||
style="stop-color:#ffffff;stop-opacity:1;" /><stop
|
||||
style="stop-color:#ffffff;stop-opacity:0.21904762;"
|
||||
offset="0.50000000"
|
||||
id="stop3445" /><stop
|
||||
id="stop3447"
|
||||
offset="1.0000000"
|
||||
style="stop-color:#ffffff;stop-opacity:1.0000000;" /></linearGradient><linearGradient
|
||||
id="linearGradient3429"><stop
|
||||
style="stop-color:#ffffff;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop3431" /><stop
|
||||
style="stop-color:#ffffff;stop-opacity:0.24761905;"
|
||||
offset="1.0000000"
|
||||
id="stop3433" /></linearGradient><linearGradient
|
||||
id="linearGradient3423"><stop
|
||||
style="stop-color:#729fcf;stop-opacity:0.20784314;"
|
||||
offset="0.0000000"
|
||||
id="stop3425" /><stop
|
||||
style="stop-color:#729fcf;stop-opacity:0.67619050;"
|
||||
offset="1.0000000"
|
||||
id="stop3427" /></linearGradient><linearGradient
|
||||
id="linearGradient3415"><stop
|
||||
style="stop-color:#7d7d7d;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop3417" /><stop
|
||||
id="stop3419"
|
||||
offset="0.50000000"
|
||||
style="stop-color:#b1b1b1;stop-opacity:1.0000000;" /><stop
|
||||
style="stop-color:#686868;stop-opacity:1.0000000;"
|
||||
offset="1.0000000"
|
||||
id="stop3421" /></linearGradient><radialGradient
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="matrix(1,0,0,0.284916,0,30.08928)"
|
||||
r="15.821514"
|
||||
fy="42.07798"
|
||||
fx="24.306795"
|
||||
cy="42.07798"
|
||||
cx="24.306795"
|
||||
id="radialGradient3413"
|
||||
xlink:href="#linearGradient4542"
|
||||
inkscape:collect="always" /><linearGradient
|
||||
id="linearGradient3402"><stop
|
||||
style="stop-color:#fafafa;stop-opacity:1.0000000;"
|
||||
offset="0.0000000"
|
||||
id="stop3404" /><stop
|
||||
style="stop-color:#bbbbbb;stop-opacity:1.0000000;"
|
||||
offset="1.0000000"
|
||||
id="stop3406" /></linearGradient><linearGradient
|
||||
id="linearGradient3396"><stop
|
||||
style="stop-color:#a3a3a3;stop-opacity:1.0000000;"
|
||||
offset="0.0000000"
|
||||
id="stop3398" /><stop
|
||||
style="stop-color:#4c4c4c;stop-opacity:1.0000000;"
|
||||
offset="1.0000000"
|
||||
id="stop3400" /></linearGradient><radialGradient
|
||||
gradientUnits="userSpaceOnUse"
|
||||
fy="114.5684"
|
||||
fx="20.892099"
|
||||
r="5.256"
|
||||
cy="114.5684"
|
||||
cx="20.892099"
|
||||
id="radialGradient3390"><stop
|
||||
id="stop3392"
|
||||
style="stop-color:#F0F0F0"
|
||||
offset="0" /><stop
|
||||
id="stop3394"
|
||||
style="stop-color:#9a9a9a;stop-opacity:1.0000000;"
|
||||
offset="1.0000000" /></radialGradient><radialGradient
|
||||
gradientUnits="userSpaceOnUse"
|
||||
fy="64.567902"
|
||||
fx="20.892099"
|
||||
r="5.257"
|
||||
cy="64.567902"
|
||||
cx="20.892099"
|
||||
id="radialGradient3384"><stop
|
||||
id="stop3386"
|
||||
style="stop-color:#F0F0F0"
|
||||
offset="0" /><stop
|
||||
id="stop3388"
|
||||
style="stop-color:#9a9a9a;stop-opacity:1.0000000;"
|
||||
offset="1.0000000" /></radialGradient><linearGradient
|
||||
id="linearGradient3378"><stop
|
||||
style="stop-color:#ffffff;stop-opacity:1.0000000;"
|
||||
offset="0.0000000"
|
||||
id="stop3380" /><stop
|
||||
style="stop-color:#f8f8f8;stop-opacity:1.0000000;"
|
||||
offset="1.0000000"
|
||||
id="stop3382" /></linearGradient><linearGradient
|
||||
y2="609.50507"
|
||||
x2="302.85715"
|
||||
y1="366.64789"
|
||||
x1="302.85715"
|
||||
gradientTransform="matrix(2.774389,0,0,1.969706,-1892.179,-872.8854)"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
id="linearGradient3370"
|
||||
xlink:href="#linearGradient5048"
|
||||
inkscape:collect="always" /><linearGradient
|
||||
id="linearGradient3362"><stop
|
||||
id="stop3364"
|
||||
offset="0"
|
||||
style="stop-color:black;stop-opacity:0;" /><stop
|
||||
style="stop-color:black;stop-opacity:1;"
|
||||
offset="0.5"
|
||||
id="stop3366" /><stop
|
||||
id="stop3368"
|
||||
offset="1"
|
||||
style="stop-color:black;stop-opacity:0;" /></linearGradient><radialGradient
|
||||
r="117.14286"
|
||||
fy="486.64789"
|
||||
fx="605.71429"
|
||||
cy="486.64789"
|
||||
cx="605.71429"
|
||||
gradientTransform="matrix(2.774389,0,0,1.969706,-1891.633,-872.8854)"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
id="radialGradient3360"
|
||||
xlink:href="#linearGradient5060"
|
||||
inkscape:collect="always" /><radialGradient
|
||||
r="117.14286"
|
||||
fy="486.64789"
|
||||
fx="605.71429"
|
||||
cy="486.64789"
|
||||
cx="605.71429"
|
||||
gradientTransform="matrix(-2.774389,0,0,1.969706,112.7623,-872.8854)"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
id="radialGradient3352"
|
||||
xlink:href="#linearGradient5060"
|
||||
inkscape:collect="always" /><radialGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient4477"
|
||||
id="radialGradient3593"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="matrix(1,0,0,0.237968,0,28.93278)"
|
||||
cx="24.130018"
|
||||
cy="37.967922"
|
||||
fx="24.130018"
|
||||
fy="37.967922"
|
||||
r="16.528622" /><linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient2846"
|
||||
id="linearGradient3595"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
x1="27.366341"
|
||||
y1="26.580296"
|
||||
x2="31.335964"
|
||||
y2="30.557772" /><linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient4440"
|
||||
id="linearGradient3597"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="matrix(1.334593,0,0,1.291292,-6.973842,-7.460658)"
|
||||
x1="30.65625"
|
||||
y1="34"
|
||||
x2="33.21875"
|
||||
y2="31.0625" /><linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient2366"
|
||||
id="linearGradient3599"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
x1="18.292673"
|
||||
y1="13.602121"
|
||||
x2="17.500893"
|
||||
y2="25.743469" /><radialGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient4454"
|
||||
id="radialGradient3601"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
cx="18.240929"
|
||||
cy="21.817987"
|
||||
fx="18.240929"
|
||||
fy="21.817987"
|
||||
r="8.3085051" /><radialGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient4467"
|
||||
id="radialGradient3603"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="matrix(2.592963,0,0,2.252104,-25.05975,-18.941)"
|
||||
cx="15.414371"
|
||||
cy="13.078408"
|
||||
fx="15.414371"
|
||||
fy="13.078408"
|
||||
r="6.65625" /></defs><sodipodi:namedview
|
||||
inkscape:cy="22.58868"
|
||||
inkscape:cx="8"
|
||||
inkscape:zoom="8"
|
||||
inkscape:window-height="920"
|
||||
inkscape:window-width="1280"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
guidetolerance="10.0"
|
||||
gridtolerance="10.0"
|
||||
objecttolerance="10.0"
|
||||
borderopacity="1.0"
|
||||
bordercolor="#666666"
|
||||
pagecolor="#ffffff"
|
||||
id="base"
|
||||
height="13.546667mm"
|
||||
width="13.546667mm"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="25"
|
||||
inkscape:current-layer="Layer_1"
|
||||
units="mm" />
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
<g
|
||||
id="g3605"
|
||||
transform="matrix(1.0683169,0,0,1.0683169,-4.1837749,-0.3623247)"><path
|
||||
style="fill:#ff3131"
|
||||
id="path21"
|
||||
d="M 146.01171,-12.604976 C 131.30382,-12.604976 109.78446,-24.588273 88.811657,-24.588273 C 67.838848,-24.588273 55.311721,-12.604976 53.130962,-10.970767 C 50.952923,-9.3365572 49.587909,-3.0743532 56.127467,-4.9804782 C 62.667024,-6.8866032 68.110763,-6.8866032 78.462569,-6.8866032 C 88.814376,-6.8866032 113.59944,7.2774532 138.65913,7.2774532 C 163.71882,7.2774532 184.68891,-12.058427 189.31962,-18.869899 C 193.95034,-25.681371 189.59154,-27.85941 186.0512,-26.222482 C 182.50815,-24.588273 159.63194,-12.604976 146.01171,-12.604976 z " /><path
|
||||
style="fill:#319831"
|
||||
id="path23"
|
||||
d="M 209.90631,24.707208 C 194.45338,24.707208 171.84636,12.726631 149.81308,12.726631 C 127.77981,12.726631 114.61639,24.707208 112.32687,26.344137 C 110.03734,27.981065 108.60436,34.24055 115.47293,32.334425 C 122.3415,30.428301 128.06259,30.428301 138.93648,30.428301 C 149.81037,30.428301 175.85167,44.592357 202.17577,44.592357 C 228.49986,44.592357 250.53586,25.256477 255.40042,18.447724 C 260.26498,11.636251 255.68593,9.4554924 251.96613,11.092421 C 248.24634,12.729349 224.21448,24.707208 209.90631,24.707208 z " /><path
|
||||
style="fill:#3232cc"
|
||||
id="path25"
|
||||
d="M 120.98193,65.880587 C 104.95797,65.880587 81.513456,52.93471 58.664434,52.93471 C 35.815411,52.93471 22.162557,65.880587 19.788738,67.645315 C 17.414919,69.410044 15.930264,76.175294 23.051719,74.114172 C 30.175893,72.05578 36.109079,72.05578 47.385397,72.05578 C 58.661714,72.05578 85.668318,87.353714 112.97131,87.353714 C 140.27158,87.353714 163.12061,66.467922 168.16735,59.112623 C 173.21409,51.757318 168.46374,49.405253 164.60526,51.169982 C 160.74679,52.93471 135.82034,65.880587 120.98193,65.880587 z " /></g>
|
||||
</svg>
|
After Width: | Height: | Size: 15 KiB |
88
subprojects/gst-devtools/debug-viewer/data/main-window.ui
Normal file
88
subprojects/gst-devtools/debug-viewer/data/main-window.ui
Normal file
@ -0,0 +1,88 @@
|
||||
<?xml version="1.0"?>
|
||||
<interface>
|
||||
<!-- interface-requires gtk+ 2.12 -->
|
||||
<!-- interface-naming-policy toplevel-contextual -->
|
||||
<object class="GtkWindow" id="main_window">
|
||||
<property name="title" translatable="yes">GStreamer Debug Viewer</property>
|
||||
<property name="default_width">640</property>
|
||||
<property name="default_height">480</property>
|
||||
<signal name="destroy" handler="handle_main_window_destroy"/>
|
||||
<child>
|
||||
<object class="GtkVBox" id="vbox_main">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<object class="GtkVBox" id="vbox_view">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<object class="GtkVPaned" id="vpaned_view">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<child>
|
||||
<object class="GtkHBox" id="hbox_view">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<object class="GtkScrolledWindow" id="log_view_scrolled_window">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="hscrollbar_policy">automatic</property>
|
||||
<property name="vscrollbar_policy">automatic</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<object class="GtkTreeView" id="log_view">
|
||||
<property name="name">log_view</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="reorderable">True</property>
|
||||
<property name="rules_hint">True</property>
|
||||
<property name="enable_search">False</property>
|
||||
<property name="fixed_height_mode">True</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="resize">True</property>
|
||||
<property name="shrink">True</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkScrolledWindow" id="line_view_scrolled_window">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="hscrollbar_policy">never</property>
|
||||
<property name="vscrollbar_policy">automatic</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<object class="GtkTreeView" id="line_view">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="headers_visible">False</property>
|
||||
<property name="rules_hint">True</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="resize">False</property>
|
||||
<property name="shrink">True</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
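The file above is a plain GtkBuilder description: object ids such as main_window, log_view and line_view are what the application looks up at runtime, and handle_main_window_destroy is the handler name declared on the window's destroy signal. A minimal PyGObject sketch of loading it (GTK 3 and the local file path are assumptions; this is not the viewer's actual startup code):

    import gi
    gi.require_version("Gtk", "3.0")
    from gi.repository import Gtk

    class Handlers:
        # Matches the handler name declared in the <signal> element above.
        def handle_main_window_destroy(self, window):
            Gtk.main_quit()

    builder = Gtk.Builder()
    builder.add_from_file("main-window.ui")      # assumed local path
    builder.connect_signals(Handlers())

    window = builder.get_object("main_window")   # ids taken from the file
    log_view = builder.get_object("log_view")
    window.show_all()
    Gtk.main()
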
82
subprojects/gst-devtools/debug-viewer/data/menus.ui
Normal file
82
subprojects/gst-devtools/debug-viewer/data/menus.ui
Normal file
@ -0,0 +1,82 @@
|
||||
<!-- -*- mode: xml; -*- -->
|
||||
<ui>
|
||||
<menubar>
|
||||
<menu name="AppMenu" action="AppMenuAction">
|
||||
<menuitem name="AppNewWindow" action="new-window"/>
|
||||
<menuitem name="WindowOpen" action="open-file"/>
|
||||
<menuitem name="WindowReload" action="reload-file"/>
|
||||
<separator/>
|
||||
<menuitem name="ShowAbout" action="show-about"/>
|
||||
<separator/>
|
||||
<menuitem name="WindowClose" action="close-window"/>
|
||||
</menu>
|
||||
<menu name="ViewMenu" action="ViewMenuAction">
|
||||
<menu name="ViewColumnsMenu" action="ViewColumnsMenuAction">
|
||||
<menuitem name="ViewColumnsTime" action="show-time-column"/>
|
||||
<menuitem name="ViewColumnsLevel" action="show-level-column"/>
|
||||
<menuitem name="ViewColumnsPid" action="show-pid-column"/>
|
||||
<menuitem name="ViewColumnsThread" action="show-thread-column"/>
|
||||
<menuitem name="ViewColumnsCode" action="show-code-column"/>
|
||||
<menuitem name="ViewColumnsCategory" action="show-category-column"/>
|
||||
<menuitem name="ViewColumnsFunction" action="show-function-column"/>
|
||||
<menuitem name="ViewColumnsObject" action="show-object-column"/>
|
||||
<menuitem name="ViewColumnsMessage" action="show-message-column"/>
|
||||
</menu>
|
||||
<placeholder name="ViewMenuAdditions"/>
|
||||
<separator/>
|
||||
<menuitem name="ViewContextMenuHideLevel" action="hide-log-level"/>
|
||||
<menuitem name="ViewContextMenuHideLevelAndAbove" action="hide-log-level-and-above"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyLevel" action="show-only-log-level"/>
|
||||
<menuitem name="ViewContextMenuHideCategory" action="hide-log-category"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyCategory" action="show-only-log-category"/>
|
||||
<menuitem name="ViewContextMenuHideThread" action="hide-thread"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyThread" action="show-only-thread"/>
|
||||
<menuitem name="ViewContextMenuHideObject" action="hide-object"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyObject" action="show-only-object"/>
|
||||
<menuitem name="ViewContextMenuHideFunction" action="hide-function"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyFunction" action="show-only-function"/>
|
||||
<menuitem name="ViewContextMenuHideFilename" action="hide-filename"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyFilename" action="show-only-filename"/>
|
||||
<menuitem name="ViewContextMenuHideBefore" action="hide-before-line"/>
|
||||
<menuitem name="ViewContextMenuHideAfter" action="hide-after-line"/>
|
||||
<menuitem name="ViewContextMenuShowHidden" action="show-hidden-lines"/>
|
||||
<separator/>
|
||||
<menuitem name="ViewContextMenuCopyMessage" action="edit-copy-message"/>
|
||||
<menuitem name="ViewContextMenuCopyLine" action="edit-copy-line"/>
|
||||
<separator/>
|
||||
<menuitem name="ZoomIn" action="enlarge-text"/>
|
||||
<menuitem name="ZoomOut" action="shrink-text"/>
|
||||
<menuitem name="Zoom100" action="reset-text"/>
|
||||
</menu>
|
||||
</menubar>
|
||||
<menubar name="context">
|
||||
<menu name="LogViewContextMenu" action="ViewMenuAction">
|
||||
<placeholder name="LogViewContextMenuAdditions"/>
|
||||
<separator/>
|
||||
<menuitem name="ViewContextMenuSetBaseTime" action="set-base-time"/>
|
||||
<menuitem name="ViewContextMenuHideLevel" action="hide-log-level"/>
|
||||
<menuitem name="ViewContextMenuHideLevelAndAbove" action="hide-log-level-and-above"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyLevel" action="show-only-log-level"/>
|
||||
<menuitem name="ViewContextMenuHideCategory" action="hide-log-category"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyCategory" action="show-only-log-category"/>
|
||||
<menuitem name="ViewContextMenuHideThread" action="hide-thread"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyThread" action="show-only-thread"/>
|
||||
<menuitem name="ViewContextMenuHideObject" action="hide-object"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyObject" action="show-only-object"/>
|
||||
<menuitem name="ViewContextMenuHideFunction" action="hide-function"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyFunction" action="show-only-function"/>
|
||||
<menuitem name="ViewContextMenuHideFilename" action="hide-filename"/>
|
||||
<menuitem name="ViewContextMenuShowOnlyFilename" action="show-only-filename"/>
|
||||
<menuitem name="ViewContextMenuHideBefore" action="hide-before-line"/>
|
||||
<menuitem name="ViewContextMenuHideAfter" action="hide-after-line"/>
|
||||
<menuitem name="ViewContextMenuShowHidden" action="show-hidden-lines"/>
|
||||
<separator/>
|
||||
<menuitem name="ViewContextMenuCopyMessage" action="edit-copy-message"/>
|
||||
<menuitem name="ViewContextMenuCopyLine" action="edit-copy-line"/>
|
||||
</menu>
|
||||
<menu name="LineViewContextMenu" action="LineViewContextMenuAction">
|
||||
<menuitem name="LineViewContextMenuClear" action="clear-line-view"/>
|
||||
<placeholder name="LineViewContextMenuAdditions"/>
|
||||
</menu>
|
||||
</menubar>
|
||||
</ui>
|
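menus.ui is in the (now deprecated) GtkUIManager XML format: every menuitem refers to an action by name, and the menu widgets are generated once matching actions are registered. A rough PyGObject sketch of that wiring, using only a small subset of the definition above (labels, accelerators and callbacks are illustrative, not taken from the viewer):

    import gi
    gi.require_version("Gtk", "3.0")
    from gi.repository import Gtk

    actions = Gtk.ActionGroup(name="viewer-actions")
    actions.add_actions([
        # (name, stock id, label, accelerator, tooltip, callback)
        ("AppMenuAction", None, "_Application", None, None, None),
        ("new-window", None, "_New Window", "<Control>n", None,
         lambda action: print("new-window activated")),
        ("open-file", None, "_Open Log File", "<Control>o", None,
         lambda action: print("open-file activated")),
    ])

    manager = Gtk.UIManager()
    manager.insert_action_group(actions, 0)
    manager.add_ui_from_string("""
    <ui>
      <menubar>
        <menu name="AppMenu" action="AppMenuAction">
          <menuitem name="AppNewWindow" action="new-window"/>
          <menuitem name="WindowOpen" action="open-file"/>
        </menu>
      </menubar>
    </ui>
    """)
    menubar = manager.get_widget("/menubar")     # ready to pack into a window
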
6
subprojects/gst-devtools/debug-viewer/data/meson.build
Normal file
6
subprojects/gst-devtools/debug-viewer/data/meson.build
Normal file
@ -0,0 +1,6 @@
|
||||
install_data('about-dialog.ui', 'main-window.ui', 'menus.ui', 'gst-debug-viewer.png',
|
||||
install_dir: join_paths(get_option('datadir'), 'gst-debug-viewer'))
|
||||
install_data('gst-debug-viewer.png',
|
||||
install_dir: join_paths(get_option('datadir'), 'icons/hicolor/48x48/apps'))
|
||||
install_data('gst-debug-viewer.svg',
|
||||
install_dir: join_paths(get_option('datadir'), 'icons/hicolor/scalable/apps'))
|
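install_data() above places the UI definitions under $datadir/gst-debug-viewer and the icons in the hicolor theme directories. A tiny sketch of where the files end up for a conventional /usr prefix (the paths below are assumptions, not read from the build):

    import os.path

    datadir = "/usr/share"                       # assumed prefix/datadir
    ui_file = os.path.join(datadir, "gst-debug-viewer", "main-window.ui")
    icon_48 = os.path.join(datadir, "icons/hicolor/48x48/apps", "gst-debug-viewer.png")
    icon_svg = os.path.join(datadir, "icons/hicolor/scalable/apps", "gst-debug-viewer.svg")
    print(ui_file, os.path.exists(ui_file))
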
71
subprojects/gst-devtools/debug-viewer/gst-debug-viewer
Executable file
71
subprojects/gst-devtools/debug-viewer/gst-debug-viewer
Executable file
@ -0,0 +1,71 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8; mode: python; -*-
|
||||
#
|
||||
# GStreamer Debug Viewer - View and analyze GStreamer debug log files
|
||||
#
|
||||
# Copyright (C) 2007 René Stadler <mail@renestadler.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation; either version 3 of the License, or (at your option)
|
||||
# any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""GStreamer Debug Viewer program invocation."""
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
import sys
|
||||
import os.path
|
||||
|
||||
def substituted(s):
|
||||
if s.startswith("@") and s.endswith("@"):
|
||||
return None
|
||||
else:
|
||||
return s
|
||||
|
||||
# These "$"-enclosed strings are substituted at install time by a custom
|
||||
# distutils extension (see setup.py). If you don't see any dollar signs at
|
||||
# all, you are looking at an installed version of this file.
|
||||
data_dir = substituted("@DATADIR@")
|
||||
lib_dir = substituted("@LIBDIR@")
|
||||
|
||||
if data_dir:
|
||||
installed = True
|
||||
else:
|
||||
# Substitution has not been run, we are running within a development
|
||||
# environment:
|
||||
lib_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
|
||||
installed = False
|
||||
|
||||
if lib_dir:
|
||||
if not os.path.normpath(lib_dir) in [os.path.normpath(p)
|
||||
for p in sys.path]:
|
||||
sys.path.insert(0, lib_dir)
|
||||
|
||||
try:
|
||||
import GstDebugViewer
|
||||
except ImportError as exc:
|
||||
print(str(exc), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
if installed:
|
||||
GstDebugViewer.Paths.setup_installed(data_dir)
|
||||
else:
|
||||
# Assume that we reside inside the source dist.
|
||||
source_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
|
||||
GstDebugViewer.Paths.setup_devenv(source_dir)
|
||||
|
||||
GstDebugViewer.run()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
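The launcher distinguishes an installed copy from a source checkout purely by whether the placeholders were replaced. The check in isolation (the example values are assumptions):

    def substituted(s):
        # An untouched "@NAME@" placeholder means the build system never ran
        # over this file, i.e. we are executing from the source tree.
        if s.startswith("@") and s.endswith("@"):
            return None
        return s

    print(substituted("@DATADIR@"))      # None -> development checkout
    print(substituted("/usr/share"))     # "/usr/share" -> installed copy
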
84
subprojects/gst-devtools/debug-viewer/meson.build
Normal file
84
subprojects/gst-devtools/debug-viewer/meson.build
Normal file
@ -0,0 +1,84 @@
|
||||
python3.install_sources (
|
||||
'GstDebugViewer/Main.py',
|
||||
'GstDebugViewer/Data.py',
|
||||
subdir: 'GstDebugViewer')
|
||||
|
||||
python3.install_sources (
|
||||
'GstDebugViewer/GUI/columns.py',
|
||||
'GstDebugViewer/GUI/__init__.py',
|
||||
'GstDebugViewer/GUI/models.py',
|
||||
'GstDebugViewer/GUI/filters.py',
|
||||
'GstDebugViewer/GUI/colors.py',
|
||||
'GstDebugViewer/GUI/window.py',
|
||||
'GstDebugViewer/GUI/app.py',
|
||||
subdir: 'GstDebugViewer/GUI')
|
||||
|
||||
python3.install_sources (
|
||||
'GstDebugViewer/Plugins/__init__.py',
|
||||
'GstDebugViewer/Plugins/FindBar.py',
|
||||
'GstDebugViewer/Plugins/Timeline.py',
|
||||
subdir: 'GstDebugViewer/Plugins')
|
||||
|
||||
python3.install_sources (
|
||||
'GstDebugViewer/Common/Main.py',
|
||||
'GstDebugViewer/Common/utils.py',
|
||||
'GstDebugViewer/Common/__init__.py',
|
||||
'GstDebugViewer/Common/generictreemodel.py',
|
||||
'GstDebugViewer/Common/Data.py',
|
||||
'GstDebugViewer/Common/GUI.py',
|
||||
subdir: 'GstDebugViewer/Common')
|
||||
|
||||
if find_program('msgfmt', required : get_option('nls')).found()
|
||||
# Desktop launcher and description file.
|
||||
desktop_file = i18n.merge_file(
|
||||
input: 'org.freedesktop.GstDebugViewer.desktop.in',
|
||||
output: 'org.freedesktop.GstDebugViewer.desktop',
|
||||
type: 'desktop',
|
||||
po_dir: 'po',
|
||||
install: true,
|
||||
install_dir: join_paths(get_option('datadir'), 'applications'),
|
||||
)
|
||||
|
||||
# Appdata file.
|
||||
appdata_file = i18n.merge_file(
|
||||
input: 'org.freedesktop.GstDebugViewer.appdata.xml.in',
|
||||
output: 'org.freedesktop.GstDebugViewer.appdata.xml',
|
||||
po_dir: 'po',
|
||||
install: true,
|
||||
install_dir: join_paths(get_option('datadir'), 'metainfo'),
|
||||
)
|
||||
else
|
||||
install_data('org.freedesktop.GstDebugViewer.desktop.in',
|
||||
rename: 'org.freedesktop.GstDebugViewer.desktop',
|
||||
install_dir: join_paths(get_option('datadir'), 'applications'))
|
||||
install_data('org.freedesktop.GstDebugViewer.appdata.xml.in',
|
||||
rename: 'org.freedesktop.GstDebugViewer.appdata.xml',
|
||||
install_dir: join_paths(get_option('datadir'), 'metainfo'))
|
||||
endif
|
||||
|
||||
cdata = configuration_data()
|
||||
cdata.set('LIBDIR', join_paths(get_option('prefix'), get_option('libdir')))
|
||||
cdata.set('DATADIR', join_paths(get_option('prefix'), get_option('datadir')))
|
||||
cdata.set('VERSION', meson.project_version())
|
||||
|
||||
configure_file(input: 'gst-debug-viewer',
|
||||
output: 'gst-debug-viewer',
|
||||
configuration: cdata,
|
||||
install_dir: get_option('bindir'))
|
||||
|
||||
init_file = configure_file(
|
||||
input: 'GstDebugViewer/__init__.py',
|
||||
output: '__init__.py',
|
||||
configuration: cdata)
|
||||
python3.install_sources (init_file, subdir: 'GstDebugViewer')
|
||||
|
||||
pkgdatadir = join_paths(get_option('datadir'), meson.project_name())
|
||||
icondir = join_paths(get_option('datadir'), 'icons/hicolor')
|
||||
|
||||
subdir('data')
|
||||
|
||||
|
||||
if run_command(python3, '-c', 'import gi; gi.require_version("Gtk", "3.0")', check: false).returncode() == 0
|
||||
test('gst-debug-viewer', python3, args: ['-m', 'unittest'],
|
||||
workdir: meson.current_source_dir())
|
||||
endif
|
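The unit test at the end is only registered when Python can import gi and load the GTK 3 typelib; the same probe can be reproduced outside Meson. A sketch that mirrors the run_command() line above:

    import subprocess
    import sys

    probe = subprocess.run(
        [sys.executable, "-c", 'import gi; gi.require_version("Gtk", "3.0")'],
        check=False,
    )
    if probe.returncode == 0:
        print("GTK 3 introspection available; the unittest suite would run")
    else:
        print("GTK 3 introspection missing; the test is skipped")
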
@ -0,0 +1,29 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<component type="desktop-application">
|
||||
<id>org.freedesktop.GstDebugViewer</id>
|
||||
<launchable type="desktop-id">org.freedesktop.GstDebugViewer.desktop</launchable>
|
||||
<metadata_license>CC-BY-3.0</metadata_license>
|
||||
<project_license>GPL-3.0+</project_license>
|
||||
<name>GStreamer Debug Viewer</name>
|
||||
<summary>Examine GStreamer debug log information</summary>
|
||||
<description>
|
||||
<p>View and read GStreamer debug logs in an efficient way</p>
|
||||
</description>
|
||||
<url type="homepage">https://gstreamer.freedesktop.org/</url>
|
||||
<url type="bugtracker">https://gitlab.freedesktop.org/gstreamer/gst-devtools/issues/</url>
|
||||
<update_contact>tsaunier@gnome.org</update_contact>
|
||||
<project_group>GStreamer</project_group>
|
||||
<translation type="gettext">GStreamer</translation>
|
||||
<developer_name>The GStreamer Team</developer_name>
|
||||
<screenshots>
|
||||
<screenshot type="default">
|
||||
<caption>The main window</caption>
|
||||
<image>https://gitlab.freedesktop.org/gstreamer/gst-devtools/-/raw/master/debug-viewer/screenshots/gst-debug-viewer.png?inline=false</image>
|
||||
</screenshot>
|
||||
</screenshots>
|
||||
<content_rating type="oars-1.0" />
|
||||
<releases>
|
||||
<release version="1.16.2" date="2019-12-03" />
|
||||
<release version="1.16.1" date="2019-09-23" />
|
||||
</releases>
|
||||
</component>
|
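The appdata document is ordinary XML, so its metadata and release history can be read back with the standard library. A minimal sketch, assuming the generated file has been saved locally as org.freedesktop.GstDebugViewer.appdata.xml:

    import xml.etree.ElementTree as ET

    root = ET.parse("org.freedesktop.GstDebugViewer.appdata.xml").getroot()
    print(root.findtext("name"))                 # GStreamer Debug Viewer
    print(root.findtext("project_license"))      # GPL-3.0+
    for release in root.iter("release"):
        print(release.get("version"), release.get("date"))
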
@ -0,0 +1,9 @@
|
||||
[Desktop Entry]
|
||||
Name=GStreamer Debug Viewer
|
||||
Comment=Examine GStreamer debug log information
|
||||
StartupNotify=true
|
||||
Exec=gst-debug-viewer
|
||||
Icon=gst-debug-viewer
|
||||
Type=Application
|
||||
Categories=GNOME;Development
|
||||
|
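Desktop entries use an INI-like syntax, so the stdlib ConfigParser can read the generated file back; the keys are case sensitive, so optionxform is set to str. A sketch, assuming the renamed org.freedesktop.GstDebugViewer.desktop is available locally:

    from configparser import ConfigParser

    parser = ConfigParser(interpolation=None)
    parser.optionxform = str                     # keep .desktop keys case sensitive
    parser.read("org.freedesktop.GstDebugViewer.desktop")

    entry = parser["Desktop Entry"]
    print(entry["Name"], "->", entry["Exec"])    # GStreamer Debug Viewer -> gst-debug-viewer
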
Binary file not shown.
After Width: | Height: | Size: 2.0 KiB |
0
subprojects/gst-devtools/debug-viewer/po/LINGUAS
Normal file
0
subprojects/gst-devtools/debug-viewer/po/LINGUAS
Normal file
Some files were not shown because too many files have changed in this diff.