diff --git a/.travis.yml b/.travis.yml index 5de3ac9..8df2354 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,21 @@ language: python python: - - "2.7" -before_install: - - sudo apt-get update -qq - - sudo apt-get install -qq diffstat +- '2.7' +- '3.3' +- '3.4' +- 3.5-dev +addons: + apt: + packages: + - diffstat +sudo: false script: cd tests; python suite.py +deploy: + provider: pypi + user: suse + password: + secure: R4+YNPW2tsiY06hibGvONYn0//1z1QdcY8VmNbYpIRly4eTAbPE9uejKpyuflUkznpEkoqCdFzi5FNFhgat9N+AkIKyX9NTkf0oxaKKbdqBM7H1V8bqLYlAO479262spRyO0ee5fV5v6g81AFjncIV+pGjtQ0Vg/sjVcvGa61bs= + on: + tags: true + distributions: sdist bdist_wheel + repo: openSUSE/osc diff --git a/NEWS b/NEWS index 4ba2231..ae9a901 100644 --- a/NEWS +++ b/NEWS @@ -1,3 +1,113 @@ +0.158 + - cat/less/blame command: default to expand to stay in sync with checkout + - add support for highly experimental native appimage.yml support + +0.157.2 + - add compat code for older apis that do not support multibuild + - fix ssl.connection instantiation in case of old M2Crypto + - highly experimental support for appimage + - fix local service run for non-existent (server-side) package + +0.157 + - add unpublish command (requires OBS 2.8) + - add blame command (requires OBS 2.9) + - results: show multibuild results by default + - getbinaries: make .AppImage files executable + - support operation in checked out package for cat/less/blame + - add comment command + - improved build recipe selection + - added multibuild (-M) to commands: + buildlog, remotebuildlog, buildinfo, build, buildhistory, jobhistory, + rebuild, restartbuild/abortbuild, wipebinaries, getbinaries + - add checkconstraints command + - add workerinfo command + - fix local build of kiwi images using obsrepositories:// + +0.156 + - highlight scheduled jobs with dispatch problems (due to constraints) + - allow to specify a log message in lock command + - Add '--wipe' option to osc chroot for deleting buildroots + - Add '--vm-memory' 
option to osc build + - add --multibuild to results command for showing all packages + of a multibuild config + - add --multibuild-package option to results command for showing + only one package of the multibuild config + +0.155 + - osc service runall: runs all service local, independend of the mode + This allows to create local files even when disabled or + just created at buildtime + - support snapcraft.yaml build descriptions + - "osc add" of a directory offers to create an archive for uploading it + - support usage or preinstall images in local builds + - revision control options for "meta prj" + - bugfixes and documentation updates + +0.154 + - switch to new obs_scm service when adding git URL's + - set OSC_VERSION environment for source services + (allows to work in local git checkouts when using obs_scm) + +0.153 + - "my sr" is using the server side request collection to get right results + - maintenance request offers to supersede old, but still open requests + - add build --vm-telnet option for getting debug shell in KVM builds + - add buildhistory --limit option + OBS 2.7 only: + - add "addchannels" and "enablechannel" commands + - support new package instances on branching when using -N parameter + - add --linkrev option to branch command + - add --add-repository-block option to branch command + - add --add-repository-rebuild option to branch command + - add service merge command + - add service wait command + +0.152 + - add support searching for groups via "group:" prefix + - show possible used incident projects on "maintained" command + OBS 2.7 only: + - support buildtime source services + - support maintenance_incident requests with acceptinfo data + - support maintenance_release requests with acceptinfo data + +0.151 + - fixed shell command injection via crafted _service files (CVE-2015-0778) + - fix times when data comes from OBS backend + - support updateing the link in target package for submit requests + - various minor bugfixes + +0.150 + - 
support local builds using builenv (for same build environment as a former build) + - add "osc api --edit" option to be able to edit some meta files directly + - follow the request order of the api (sorting according to priorization) + - add mr --release-project option for kgraft updates + - add support for makeoriginolder in request + +0.149 + - removed "--diff" option from the "createrequest" command + - introduced new "vc-cmd" config option, which is used to specify the path + to the vc script + - various bugfixes + +0.148 + - support new history including review history of OBS 2.6 + - display request priorities, if important or critical + - add "osc rq priorize" command to re-priorize existing requests + - allow also "osc rq ls" shortcut + - fish shell completion support + +0.147 + - support groups in maintainership requests + - fixing listing of review requests + - support expanded package listing (when using project links) + - fixing "osc add git://" behaviour + - using xz as default compression + - support local debian live (image) build format + - handle ppc64le for debian as well + - fix buildlog --strip-time + - some more minor bugfixes + - speedup update of a project working copy (in some cases) + 0.146 - support maintenance release request with acceptinfo data (OBS 2.6) - setlinkrev can be used to update frozen links to current revisions again diff --git a/README b/README index 3ea39b4..7b1e344 100644 --- a/README +++ b/README @@ -13,7 +13,7 @@ INSTALLATION: RPM packages are here (rpm-md repository): http://download.opensuse.org/repositories/openSUSE:/Tools/ -To install from svn, do +To install from git, do python setup.py build python setup.py install diff --git a/dist/complete.sh b/dist/complete.sh index 77156ad..6148b40 100644 --- a/dist/complete.sh +++ b/dist/complete.sh @@ -1,7 +1,6 @@ test -z "$BASH_VERSION" && return complete -o default _nullcommand >/dev/null 2>&1 || return complete -r _nullcommand >/dev/null 2>&1 || return 
-COMP_WORDBREAKS="${COMP_WORDBREAKS//:}" test -s /usr/share/osc/complete && complete -o default -C /usr/share/osc/complete osc test -s /usr/lib64/osc/complete && complete -o default -C /usr/lib64/osc/complete osc test -s /usr/lib/osc/complete && complete -o default -C /usr/lib/osc/complete osc diff --git a/dist/osc.complete b/dist/osc.complete old mode 100644 new mode 100755 index 173699f..bf4f606 --- a/dist/osc.complete +++ b/dist/osc.complete @@ -6,7 +6,6 @@ # # usage with bash # -# COMP_WORDBREAKS="${COMP_WORDBREAKS//:}" # complete -C osc.complete osc # # Author: Werner Fink @@ -35,14 +34,19 @@ if test "/proc/$PPID/exe" -ef /bin/tcsh ; then let colon=0 else COMMAND_LINE="${COMP_LINE:0:$COMP_POINT}" -# let colon=1 let colon=0 + case "$COMP_WORDBREAKS" in + *:*) let colon=1 + esac [[ $COMMAND_LINE =~ \\: ]] && COMMAND_LINE="${COMMAND_LINE//\\:/:}" fi IFS="${IFS}=" cmdline=($COMMAND_LINE) IFS="$OIFS" -test "${cmdline[0]}" != "osc" && exit 1 +case "${cmdline[0]}" in +iosc|isc|osc) ;; +*) exit 1 +esac let last=${#COMMAND_LINE} let last-- @@ -55,8 +59,8 @@ oscopts=(--version --help --debugger --post-mortem --traceback --http-full-debug --debug --apiurl -A --config -c --no-keyring --no-gnome-keyring --verbose --quiet) osccmds=(abortbuild add addremove aggregatepac api ar bco bl blt branch branchco bsdevelproject bse bugowner build buildconfig buildhist buildhistory buildinfo - buildlog buildlogtail cat changedevelreq changedevelrequest checkin checkout - chroot ci co commit config copypac cr createincident createrequest creq del + buildlog buildlogtail cat changedevelreq changedevelrequest checkconstraints checkin checkout + chroot ci co comment commit config copypac cr createincident createrequest creq del delete deletereq deleterequest dependson detachbranch develproject di diff distributions dists dr dropreq droprequest getbinaries getpac help importsrcpkg info init jobhist jobhistory lbl ldiff less linkdiff linkpac linktobranch list @@ -69,34 +73,76 @@ 
osccmds=(abortbuild add addremove aggregatepac api ar bco bl blt branch branchco resolved results revert review rm rq rremove se search service setlinkrev signkey sm sr st status submitpac submitreq submitrequest tr triggerreason undelete unlock up update updatepacmetafromspec user vc whatdependson who whois - wipebinaries) + wipebinaries workerinfo) oscreq=(list log show accept decline revoke reopen setincident supersede approvenew checkout clone) oscrev=(show list add accept decline reopen supersede) oscmy=(work pkg prj rq sr) +osccmt=(list create delete) +osccmtkind=(package project request) oscprj="" oscpkg="" lnkprj="" lnkpkg="" +apiurl="" +alias="" test -s ${PWD}/.osc/_project && read -t 1 oscprj < ${PWD}/.osc/_project test -s ${PWD}/.osc/_package && read -t 1 oscpkg < ${PWD}/.osc/_package if test -s ${PWD}/.osc/_files ; then lnkprj=$(command sed -rn '/ /dev/null) +fi +if test "${cmdline[0]}" = isc ; then + alias=internal +fi + +projects=~/.osc.projects +command=osc -if test -s ~/.osc.projects ; then - typeset -i ctime=$(command date -d "$(command stat -c '%z' ~/.osc.projects)" +'%s') +case "${cmdline[1]}" in +-A|--apiurl) + if test -n "${cmdline[2]}" -a -s ~/.oscrc ; then + hints=($(sed -rn '/^(aliases=|\[http)/{s/,/ /g;s/(aliases=|\[|\])//gp}' < ~/.oscrc 2> /dev/null)) + for h in ${hints[@]} ; do + case "$h" in + http*) + tmp=$(sed -rn '\@^\['${h}'@,\@=@{\@^aliases=@{s@[^=]+=([^,]+),.*@\1@p};}' < ~/.oscrc 2> /dev/null) + if test "${cmdline[2]}" = "$h" ; then + alias=$tmp + break + fi + ;; + *) + if test "${cmdline[2]}" = "$h" ; then + alias=$h + break + fi + esac + done + fi +esac + +if test -n "$alias" ; then + projects="${projects}.${alias}" + command="$command -A $alias" +fi + +if test -s "${projects}" ; then + typeset -i ctime=$(command date -d "$(command stat -c '%z' ${projects})" +'%s') typeset -i now=$(command date -d now +'%s') if ((now - ctime > 86400)) ; then - if tmp=$(mktemp ~/.osc.projects.XXXXXX) ; then - command osc ls / >| $tmp - mv -uf $tmp 
~/.osc.projects + if tmp=$(mktemp ${projects}.XXXXXX) ; then + command ${command} ls / >| $tmp + mv -uf $tmp ${projects} fi fi else - command osc ls / >| ~/.osc.projects + command ${command} ls / >| "${projects}" fi projects () @@ -104,7 +150,7 @@ projects () local -a list local -a argv local -i argc=0 - local arg + local arg cur for arg; do if test $arg == "--" ; then let argc++ @@ -113,15 +159,18 @@ projects () argv[argc++]=$arg done shift $argc - if test -n "$1" ; then - list=($(command grep -E "^$1" ~/.osc.projects)) + cur="$1" + if test -n "${cur}" ; then + list=($(command grep -E "^${cur}" ${projects})) else - list=($(command cat ~/.osc.projects)) + list=($(command cat ${projects})) fi if ((colon)) ; then - builtin compgen -W "${list[*]}" "$1"|sed -r 's@([^\\]):@\1\\:@g' + local colon_word + colon_word=${cur%${cur##*:}} + builtin compgen -W "${list[*]}" -- "${cur}" | sed -r "s@^${colon_word}@@g" else - builtin compgen -W "${list[*]}" -- ${1+"$@"} + builtin compgen -W "${list[*]}" -- "${cur}" fi } @@ -130,7 +179,7 @@ packages () local -a list local -a argv local -i argc=0 - local arg + local arg cur for arg; do if test $arg == "--" ; then let argc++ @@ -139,12 +188,13 @@ packages () argv[argc++]=$arg done shift $argc - if test -n "$1" ; then - list=($(command osc ls ${argv[@]}|command grep -E "^$1")) + cur="$1" + if test -n "${cur}" ; then + list=($(command ${command} ls ${argv[@]}|command grep -E "^${cur}")) else - list=($(command osc ls ${argv[@]})) + list=($(command ${command} ls ${argv[@]})) fi - builtin compgen -W "${list[*]}" -- ${1+"$@"} + builtin compgen -W "${list[*]}" -- "${cur}" } repositories () @@ -162,11 +212,11 @@ repositories () done shift $argc if test -n "$1" ; then - list=($(command osc meta prj ${argv[@]}|\ + list=($(command ${command} meta prj ${argv[@]}|\ command sed -rn '//{s@^\s*(.*)@\1@p}'|\ command sort -u|command grep -E "^$1")) else - list=($(command osc meta prj ${argv[@]}|\ + list=($(command ${command} meta prj ${argv[@]}|\ 
command sed -rn '//{s@^\s*(.*)@\1@p}'|\ command sort -u)) fi @@ -223,8 +273,8 @@ targets () users () { - if test -s ~/.osc.projects ; then - command sed -rn "/^home:$1/{ s/^home:([^:]*):.*/\1/p}" ~/.osc.projects|command sort -u + if test -s ${projects} ; then + command sed -rn "/^home:$1/{ s/^home:([^:]*):.*/\1/p}" ${projects}|command sort -u elif test -s ~/.oscrc; then command sed -rn '/^(user=)/{s/(user=)//p}' ~/.oscrc|command sort -u else @@ -350,7 +400,7 @@ add|addremove|ar) fi ;; build) - opts=(--help --oldpackages --disable-cpio-bulk-download --download-api-only --release --baselibs + opts=(--help --oldpackages --disable-cpio-bulk-download --release --baselibs --disable-debuginfo --debuginfo --alternative-project --vm-type --linksources --local-package --build-uid --userootforbuild --define --without --with --ccache --icecream --jobs --root --extra-pkgs --keep-pkgs --prefer-pkgs @@ -437,9 +487,8 @@ build) done fi if ((count == 2)) ; then - specs=($(command ls *.spec 2>/dev/null)) - images=($(command ls *.kiwi 2>/dev/null)) - builtin compgen -W "${opts[*]} ${specs[*]} ${images[*]}" -- "${cmdline[count]}" + specs=($(command ls *.spec)) + builtin compgen -W "${opts[*]} ${specs[*]}" -- "${cmdline[count]}" fi ;; branch|getpac|bco|branchco) @@ -588,7 +637,7 @@ less|cat) fi ;; sr|submitpac|submitreq|submitrequest) - opts=(--help --yes --diff --no-update --no-cleanup --cleanup --seperate-requests + opts=(--help --yes --diff --no-update --no-cleanup --cleanup --separate-requests --nodevelproject --supersede --revision) if ((count == 1)) ; then builtin compgen -W "${osccmds[*]}" -- "${cmdline[count]}" @@ -1048,6 +1097,16 @@ my) builtin compgen -W "${opts[*]}" -- "${cmdline[3]}" fi ;; +comment) + opts=(--comment --parent) + if ((count == 1)) ; then + builtin compgen -W "${osccmds[*]}" -- "${cmdline[count]}" + elif ((count == 2)) ; then + builtin compgen -W "${opts[*]} ${osccmt[*]}" -- "${cmdline[2]}" + elif ((count == 3)) ; then + builtin compgen -W "${opts[*]} 
${osccmtkind[*]}" -- "${cmdline[3]}" + fi + ;; copypac|linkpac) opts=(--help --expand --to-apiurl --revision --keep-develproject --keep-link --keep-maintainers --client-side-copy) @@ -1349,7 +1408,7 @@ maintainer) opts=(--help --role --delete --set-bugowner-request --set-bugowner --all --add --devel-project --verbose --nodevelproject --email --bugowner --bugowner-only) if ((count == 1)) ; then - builtin compgen -W "${osccmds[*]}" -- "${cmdline[count]}" + builtin compgen -W "${osccmds[*]}" -- "${cmdline[count]}" elif ((count >= 2)) ; then for ((off=2; off<=count; off++)) ; do while test "${cmdline[off+remove]::1}" = "-" ; do @@ -1765,6 +1824,30 @@ diff|linkdiff) builtin compgen -W "${opts[*]}" -- "${cmdline[count]}" fi ;; +workerinfo) + opts=(--help) + if ((count == 1)) ; then + builtin compgen -W "${osccmds[*]} ${oscopts[*]}" -- "${cmdline[count]}" + elif ((count >= 2)) ; then + if test "${cmdline[count]::1}" = "-" ; then + builtin compgen -W "${opts[*]}" -- "${cmdline[count]}" + else + targets ${opts[*]} -- "${cmdline[count]}" + fi + fi + ;; +checkconstraints) + opts=(--help --ignore-file) + if ((count == 1)) ; then + builtin compgen -W "${osccmds[*]} ${oscopts[*]}" -- "${cmdline[count]}" + elif ((count >= 2)) ; then + if test "${cmdline[count]::1}" = "-" ; then + builtin compgen -W "${opts[*]}" -- "${cmdline[count]}" + else + targets ${opts[*]} -- "${cmdline[count]}" + fi + fi + ;; *) opts=(--help) if ((count == 1)) ; then @@ -1775,5 +1858,5 @@ diff|linkdiff) else targets ${opts[*]} -- "${cmdline[count]}" fi - fi + fi esac diff --git a/docs/_static/.keepme b/docs/_static/.keepme new file mode 100644 index 0000000..e69de29 diff --git a/docs/api/modules.rst b/docs/api/modules.rst new file mode 100644 index 0000000..26c0985 --- /dev/null +++ b/docs/api/modules.rst @@ -0,0 +1,10 @@ +osc +=== + +These are the packages in the osc package. + +.. 
toctree:: + :maxdepth: 4 + + osc.core + osc.util diff --git a/docs/api/osc.core.rst b/docs/api/osc.core.rst new file mode 100644 index 0000000..b4840ba --- /dev/null +++ b/docs/api/osc.core.rst @@ -0,0 +1,20 @@ +.. py:module:: osc.core + +core +==== + +This is the osc core module. + +basic structures +---------------- + +.. autoclass:: File + :members: + + +.. autoclass:: Serviceinfo + :members: + + +.. autoclass:: Linkinfo + :members: diff --git a/docs/api/osc.util.rst b/docs/api/osc.util.rst new file mode 100644 index 0000000..e00bd62 --- /dev/null +++ b/docs/api/osc.util.rst @@ -0,0 +1,78 @@ +osc.util package +================ + +Submodules +---------- + +osc.util.ar module +------------------ + +.. automodule:: osc.util.ar + :members: + :undoc-members: + :show-inheritance: + +osc.util.archquery module +------------------------- + +.. automodule:: osc.util.archquery + :members: + :undoc-members: + :show-inheritance: + +osc.util.cpio module +-------------------- + +.. automodule:: osc.util.cpio + :members: + :undoc-members: + :show-inheritance: + +osc.util.debquery module +------------------------ + +.. automodule:: osc.util.debquery + :members: + :undoc-members: + :show-inheritance: + +osc.util.packagequery module +---------------------------- + +.. automodule:: osc.util.packagequery + :members: + :undoc-members: + :show-inheritance: + +osc.util.repodata module +------------------------ + +.. automodule:: osc.util.repodata + :members: + :undoc-members: + :show-inheritance: + +osc.util.rpmquery module +------------------------ + +.. automodule:: osc.util.rpmquery + :members: + :undoc-members: + :show-inheritance: + +osc.util.safewriter module +-------------------------- + +.. automodule:: osc.util.safewriter + :members: + :undoc-members: + :show-inheritance: + + +Module contents +--------------- + +.. 
automodule:: osc.util + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/api/tutorial.rst b/docs/api/tutorial.rst new file mode 100644 index 0000000..01a440d --- /dev/null +++ b/docs/api/tutorial.rst @@ -0,0 +1,96 @@ +Tutorial +======== + +This is a tutorial on how to use the osc python api. + +Key to the |obs| are (remote): + + #. A **project** + #. A project has associated multiple **repositories** (linux distributions) + #. Multiple **packages** in a project will hold the builds against the difefrent **repositories** + + +A user will deal with local checkout of a project in a **working copy**: this is similar to the +subversion checkout model. + + +Initial config setup +-------------------- + +Osc the library requires an initial setup: + + >>> import osc.conf + >>> osc.conf.get_config() + +This will read all the external config files (eg. ~/.oscrc) and the internal configuration +values. + + +Acquiring the apiurl +-------------------- + +All the osc operation will use a **apiurl** to lookup for things like passwords, username and other parameters +while performing operations: + + >>> apiurl = osc.conf.config['apiurl'] + + +Operations on a remote build server +----------------------------------- + +osc is similar to subversion, it has a remote server and a local (checkout) **working** directory. +First we'll go through the remote operation on a server **NOT** requiring a checkout. +Operations are contained in the osc.core module: + + >>> import osc.core + + +List all the projects and packages +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This will show all the projects on the remote |obs|: + + >>> for prj in osc.core.meta_get_project_list(apiurl, deleted=False): + print prj + + +A project has **repositories** associated with it (eg. 
linux distributions): + + >>> prj = 'home:cavallo71:opt-python-interpreters' + >>> for repo in osc.core.get_repos_of_project(apiurl, prj): + print repo + + +A project contains packages and to list them all: + + >>> prj = 'home:cavallo71:opt-python-interpreters' + >>> for pkg in osc.core.meta_get_packagelist(apiurl, prj): + print pkg + + +Add a package to an existing project +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +Operations in a checked out **working copy** +-------------------------------------------- + + + +Create your first project: the hello project +-------------------------------------------- + +.. todo:: add he description on how to init a project + + +Adding your firs package to the project hello: the world package +---------------------------------------------------------------- + +.. todo:: add he description on how to add a package + + + +Setting the build architectures +------------------------------- + + diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..a3c582a --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- +# +# osc documentation build configuration file, created by +# sphinx-quickstart on Sun Jan 24 13:06:29 2016. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + + +# top level dir (one above this file) +topdir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')) + + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+sys.path.insert(0, topdir) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.todo', + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.ifconfig', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'osc' +copyright = u'2016, see authors list' +author = u'see authors list' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '1.2.3' +# The full version, including alpha/beta/rc tags. +release = '4.5.6' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. 
+#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'oscdoc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', + +# Latex figure (float) alignment +#'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'osc.tex', u'osc Documentation', + u'see authors list', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. 
List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'osc', u'osc Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'osc', u'osc Documentation', + author, 'osc', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + +rst_epilog = """ +.. |obs| replace:: open build service +""" diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..2fcde0a --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,33 @@ +.. osc documentation master file, created by + sphinx-quickstart on Sun Jan 24 13:06:29 2016. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to osc's documentation! +=============================== + +This is the documentation for the osc python client to the |obs|. + +Tutorial + +.. TODO:: add more documentation + + + +API: + +.. 
toctree:: + :maxdepth: 2 + + api/tutorial + api/modules + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/osc-wrapper.py b/osc-wrapper.py index 820473f..833729e 100755 --- a/osc-wrapper.py +++ b/osc-wrapper.py @@ -5,6 +5,7 @@ import locale import sys +import os from osc import commandline, babysitter @@ -21,6 +22,20 @@ #reload, neither setdefaultencoding are in python3 pass +# avoid buffering output on pipes (bnc#930137) +# Basically, a "print('foo')" call is translated to a corresponding +# fwrite call that writes to the stdout stream (cf. string_print +# (Objects/stringobject.c) and builtin_print (Python/bltinmodule.c)); +# If no pipe is used, stdout is a tty/refers to a terminal => +# the stream is line buffered (see _IO_file_doallocate (libio/filedoalloc.c)). +# If a pipe is used, stdout does not refer to a terminal anymore => +# the stream is fully buffered by default (see _IO_file_doallocate). +# The following fdopen call makes stdout line buffered again (at least on +# systems that support setvbuf - if setvbuf is not supported, the stream +# remains fully buffered (see PyFile_SetBufSize (Objects/fileobject.c))). 
+if not os.isatty(sys.stdout.fileno()): + sys.stdout = os.fdopen(sys.stdout.fileno(), sys.stdout.mode, 1) + osccli = commandline.Osc() r = babysitter.run(osccli) diff --git a/osc.fish b/osc.fish new file mode 100644 index 0000000..09dac9d --- /dev/null +++ b/osc.fish @@ -0,0 +1,116 @@ +# fish completion for git +# vim: smartindent:expandtab:ts=2:sw=2 + +function __fish_osc_needs_command + set cmd (commandline -opc) + if contains "$cmd" 'osc' 'osc help' + return 0 + end + return 1 +end + +function __fish_osc_using_command + set cmd (commandline -opc) + if [ (count $cmd) -gt 1 ] + for arg in $argv + if [ $arg = $cmd[2] ] + return 0 + end + end + end + return 1 +end + +# general options +complete -f -c osc -n 'not __fish_osc_needs_command' -s A -l apiurl -d 'specify URL to access API server at or an alias' +complete -f -c osc -n 'not __fish_osc_needs_command' -s c -l config -d 'specify alternate configuration file' +complete -f -c osc -n 'not __fish_osc_needs_command' -s d -l debug -d 'print info useful for debugging' +complete -f -c osc -n 'not __fish_osc_needs_command' -l debugger -d 'jump into the debugger before executing anything' +complete -f -c osc -n 'not __fish_osc_needs_command' -s h -l help -d 'show this help message and exit' +complete -f -c osc -n 'not __fish_osc_needs_command' -s H -l http-debug -d 'debug HTTP traffic (filters some headers)' +complete -f -c osc -n 'not __fish_osc_needs_command' -l http-full-debug -d 'debug HTTP traffic (filters no headers)' +complete -f -c osc -n 'not __fish_osc_needs_command' -l no-gnome-keyring -d 'disable usage of GNOME Keyring' +complete -f -c osc -n 'not __fish_osc_needs_command' -l no-keyring -d 'disable usage of desktop keyring system' +complete -f -c osc -n 'not __fish_osc_needs_command' -l post-mortem -d 'jump into the debugger in case of errors' +complete -f -c osc -n 'not __fish_osc_needs_command' -s q -l quiet -d 'be quiet, not verbose' +complete -f -c osc -n 'not __fish_osc_needs_command' -s t -l traceback 
-d 'print call trace in case of errors' +complete -f -c osc -n 'not __fish_osc_needs_command' -s v -l verbose -d 'increase verbosity' +complete -f -c osc -n 'not __fish_osc_needs_command' -l version -d 'show program\'s version number and exit' + +# osc commands +complete -f -c osc -n '__fish_osc_needs_command' -a 'add' -d 'Mark files to be added upon the next commit' +complete -f -c osc -n '__fish_osc_needs_command' -a 'addremove ar' -d 'Adds new files, removes disappeared files' +complete -f -c osc -n '__fish_osc_needs_command' -a 'aggregatepac' -d '"Aggregate" a package to another package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'api' -d 'Issue an arbitrary request to the API' +complete -f -c osc -n '__fish_osc_needs_command' -a 'branch bco branchco getpac' -d 'Branch a package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'chroot' -d 'into the buildchroot' +complete -f -c osc -n '__fish_osc_needs_command' -a 'clean' -d 'removes all untracked files from the package working ...' +complete -f -c osc -n '__fish_osc_needs_command' -a 'commit checkin ci' -d 'Upload content to the repository server' +complete -f -c osc -n '__fish_osc_needs_command' -a 'config' -d 'get/set a config option' +complete -f -c osc -n '__fish_osc_needs_command' -a 'copypac' -d 'Copy a package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'createincident' -d 'Create a maintenance incident' +complete -f -c osc -n '__fish_osc_needs_command' -a 'createrequest creq' -d 'create multiple requests with a single command' +complete -f -c osc -n '__fish_osc_needs_command' -a 'delete del remove rm' -d 'Mark files or package directories to be deleted upon ...' 
+complete -f -c osc -n '__fish_osc_needs_command' -a 'deleterequest deletereq dr dropreq droprequest' -d 'Request to delete (or "drop") a package or project' +complete -f -c osc -n '__fish_osc_needs_command' -a 'dependson whatdependson' -d 'Show the build dependencies' +complete -f -c osc -n '__fish_osc_needs_command' -a 'detachbranch' -d 'replace a link with its expanded sources' +complete -f -c osc -n '__fish_osc_needs_command' -a 'develproject bsdevelproject dp' -d 'print the devel project / package of a package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'diff di ldiff linkdiff' -d 'Generates a diff' +complete -f -c osc -n '__fish_osc_needs_command' -a 'distributions dists' -d 'Shows all available distributions' +complete -f -c osc -n '__fish_osc_needs_command' -a 'getbinaries' -d 'Download binaries to a local directory' +complete -f -c osc -n '__fish_osc_needs_command' -a 'help ? h' -d 'give detailed help on a specific sub-command' +complete -f -c osc -n '__fish_osc_needs_command' -a 'importsrcpkg' -d 'Import a new package from a src.rpm' +complete -f -c osc -n '__fish_osc_needs_command' -a 'info' -d 'Print information about a working copy' +complete -f -c osc -n '__fish_osc_needs_command' -a 'init' -d 'Initialize a directory as working copy' +complete -f -c osc -n '__fish_osc_needs_command' -a 'jobhistory jobhist' -d 'Shows the job history of a project' +complete -f -c osc -n '__fish_osc_needs_command' -a 'linkpac' -d '"Link" a package to another package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'linktobranch' -d 'Convert a package containing a classic link with patc...' 
+complete -f -c osc -n '__fish_osc_needs_command' -a 'list LL lL ll ls' -d 'List sources or binaries on the server' +complete -f -c osc -n '__fish_osc_needs_command' -a 'localbuildlog lbl' -d 'Shows the build log of a local buildchroot' +complete -f -c osc -n '__fish_osc_needs_command' -a 'log' -d 'Shows the commit log of a package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'maintainer bugowner' -d 'Show maintainers according to server side configuration' +complete -f -c osc -n '__fish_osc_needs_command' -a 'maintenancerequest mr' -d 'Create a request for starting a maintenance incident.' +complete -f -c osc -n '__fish_osc_needs_command' -a 'man' -d 'generates a man page' +complete -f -c osc -n '__fish_osc_needs_command' -a 'mbranch maintained sm' -d 'Search or branch multiple instances of a package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'meta' -d 'Show meta information, or edit it' +complete -f -c osc -n '__fish_osc_needs_command' -a 'mkpac' -d 'Create a new package under version control' +complete -f -c osc -n '__fish_osc_needs_command' -a 'mv' -d 'Move SOURCE file to DEST and keep it under version co...' +complete -f -c osc -n '__fish_osc_needs_command' -a 'my' -d 'show waiting work, packages, projects or requests inv...' +complete -f -c osc -n '__fish_osc_needs_command' -a 'patchinfo' -d 'Generate and edit a patchinfo file.' +complete -f -c osc -n '__fish_osc_needs_command' -a 'pdiff' -d 'Quick alias to diff the content of a package with its...' +complete -f -c osc -n '__fish_osc_needs_command' -a 'prdiff projdiff projectdiff' -d 'Server-side diff of two projects' +complete -f -c osc -n '__fish_osc_needs_command' -a 'prjresults pr' -d 'Shows project-wide build results' +complete -f -c osc -n '__fish_osc_needs_command' -a 'pull' -d 'merge the changes of the link target into your workin...' +complete -f -c osc -n '__fish_osc_needs_command' -a 'rdelete' -d 'Delete a project or packages on the server.' 
+complete -f -c osc -n '__fish_osc_needs_command' -a 'rdiff' -d 'Server-side "pretty" diff of two packages' +complete -f -c osc -n '__fish_osc_needs_command' -a 'rebuild rebuildpac' -d 'Trigger package rebuilds' +complete -f -c osc -n '__fish_osc_needs_command' -a 'release' -d 'Release sources and binaries' +complete -f -c osc -n '__fish_osc_needs_command' -a 'releaserequest' -d 'Create a request for releasing a maintenance update.' +complete -f -c osc -n '__fish_osc_needs_command' -a 'remotebuildlog rbl rblt rbuildlog rbuildlogtail remotebuildlogtail' -d 'Shows the build log of a package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'repairlink' -d 'Repair a broken source link' +complete -f -c osc -n '__fish_osc_needs_command' -a 'repairwc' -d 'try to repair an inconsistent working copy' +complete -f -c osc -n '__fish_osc_needs_command' -a 'repositories platforms repos' -d 'shows repositories configured for a project. It skips...' +complete -f -c osc -n '__fish_osc_needs_command' -a 'repourls' -d 'Shows URLs of .repo files' +complete -f -c osc -n '__fish_osc_needs_command' -a 'request review rq' -d 'Show or modify requests and reviews' +complete -f -c osc -n '__fish_osc_needs_command' -a 'requestmaintainership reqbs reqbugownership reqmaintainership reqms requestbugownership' -d 'requests to add user as maintainer or bugowner' +complete -f -c osc -n '__fish_osc_needs_command' -a 'resolved' -d 'Remove "conflicted" state on working copy files' +complete -f -c osc -n '__fish_osc_needs_command' -a 'restartbuild abortbuild' -d 'Restart the build of a certain project or package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'results r' -d 'Shows the build results of a package or project' +complete -f -c osc -n '__fish_osc_needs_command' -a 'revert' -d 'Restore changed files or the entire working copy.' 
+complete -f -c osc -n '__fish_osc_needs_command' -a 'rremove' -d 'Remove source files from selected package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'search bse se' -d 'Search for a project and/or package.' +complete -f -c osc -n '__fish_osc_needs_command' -a 'service' -d 'Handle source services' +complete -f -c osc -n '__fish_osc_needs_command' -a 'setdevelproject sdp' -d 'Set the devel project / package of a package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'setlinkrev' -d 'Updates a revision number in a source link.' +complete -f -c osc -n '__fish_osc_needs_command' -a 'signkey' -d 'Manage Project Signing Key' +complete -f -c osc -n '__fish_osc_needs_command' -a 'status st' -d 'Show status of files in working copy' +complete -f -c osc -n '__fish_osc_needs_command' -a 'submitrequest sr submitpac submitreq' -d 'Create request to submit source into another Project' +complete -f -c osc -n '__fish_osc_needs_command' -a 'token' -d 'Show and manage authentication token' +complete -f -c osc -n '__fish_osc_needs_command' -a 'triggerreason tr' -d 'Show reason why a package got triggered to build' +complete -f -c osc -n '__fish_osc_needs_command' -a 'undelete' -d 'Restores a deleted project or package on the server.' 
+complete -f -c osc -n '__fish_osc_needs_command' -a 'unlock' -d 'Unlocks a project or package' +complete -f -c osc -n '__fish_osc_needs_command' -a 'update up' -d 'Update a working copy' +complete -f -c osc -n '__fish_osc_needs_command' -a 'updatepacmetafromspec metafromspec updatepkgmetafromspec' -d 'Update package meta information from a specfile' +complete -f -c osc -n '__fish_osc_needs_command' -a 'vc' -d 'Edit the changes file' +complete -f -c osc -n '__fish_osc_needs_command' -a 'whois user who' -d 'Show fullname and email of a buildservice user' +complete -f -c osc -n '__fish_osc_needs_command' -a 'wipebinaries' -d 'Delete all binary packages of a certain project/package' diff --git a/osc/OscConfigParser.py b/osc/OscConfigParser.py index 3ed954f..bf154b1 100644 --- a/osc/OscConfigParser.py +++ b/osc/OscConfigParser.py @@ -323,6 +323,16 @@ def write(self, fp, comments = False): else: configparser.SafeConfigParser.write(self, fp) + def has_option(self, section, option, proper=False, **kwargs): + """ + Returns True, if the passed section contains the specified option. + If proper is True, True is only returned if the option is owned by + this section and not "inherited" from the default. + """ + if proper: + return self.optionxform(option) in self._sections[section].keys() + return configparser.SafeConfigParser.has_option(self, section, option, **kwargs) + # XXX: simplify! 
def __str__(self): ret = [] diff --git a/osc/babysitter.py b/osc/babysitter.py index 544418c..22c3d83 100644 --- a/osc/babysitter.py +++ b/osc/babysitter.py @@ -11,6 +11,7 @@ import sys import signal import traceback +from urlgrabber.grabber import URLGrabError from osc import oscerr from .oscsslexcp import NoSecureSSLError @@ -77,7 +78,7 @@ def run(prg, argv=None): return 1 except KeyboardInterrupt: print('interrupted!', file=sys.stderr) - return 1 + return 130 except oscerr.UserAbort: print('aborted.', file=sys.stderr) return 1 @@ -121,6 +122,7 @@ def run(prg, argv=None): if '' in body: msg = body.split('')[1] msg = msg.split('')[0] + msg = msg.replace('<', '<').replace('>' , '>').replace('&', '&') print(msg, file=sys.stderr) if e.code >= 500 and e.code <= 599: print('\nRequest: %s' % e.filename) @@ -140,6 +142,9 @@ def run(prg, argv=None): except URLError as e: print('Failed to reach a server:\n', e.reason, file=sys.stderr) return 1 + except URLGrabError as e: + print('Failed to grab %s: %s' % (e.url, e.strerror), file=sys.stderr) + return 1 except IOError as e: # ignore broken pipe if e.errno != errno.EPIPE: diff --git a/osc/build.py b/osc/build.py index 76d4eb9..a44fd26 100644 --- a/osc/build.py +++ b/osc/build.py @@ -33,6 +33,11 @@ from .conf import config, cookiejar +try: + from .meter import TextMeter +except: + TextMeter = None + change_personality = { 'i686': 'linux32', 'i586': 'linux32', @@ -53,20 +58,20 @@ ] can_also_build = { - 'aarch64':['aarch64'], # only needed due to used heuristics in build parameter evaluation - 'armv6l' :[ 'armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el' ], - 'armv7l' :[ 'armv4l', 'armv5l', 'armv6l', 'armv7l', 'armv5el', 'armv6el', 'armv7el' ], - 'armv5el':[ 'armv4l', 'armv5l', 'armv5el' ], # not existing arch, just for compatibility - 'armv6el':[ 'armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el' ], # not existing arch, just for compatibility - 'armv6hl':[ 'armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el' ], - 'armv7el':[ 
'armv4l', 'armv5l', 'armv6l', 'armv7l', 'armv5el', 'armv6el', 'armv7el' ], # not existing arch, just for compatibility - 'armv7hl':[ 'armv7hl' ], # not existing arch, just for compatibility - 'armv8el':[ 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el' ], # not existing arch, just for compatibility - 'armv8l' :[ 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el' ], # not existing arch, just for compatibility - 'armv5tel':[ 'armv4l', 'armv5el', 'armv5tel' ], + 'aarch64': ['aarch64'], # only needed due to used heuristics in build parameter evaluation + 'armv6l': [ 'armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el' ], + 'armv7l': [ 'armv4l', 'armv5l', 'armv6l', 'armv7l', 'armv5el', 'armv6el', 'armv7el' ], + 'armv5el': [ 'armv4l', 'armv5l', 'armv5el' ], # not existing arch, just for compatibility + 'armv6el': [ 'armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el' ], # not existing arch, just for compatibility + 'armv6hl': [ 'armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el' ], + 'armv7el': [ 'armv4l', 'armv5l', 'armv6l', 'armv7l', 'armv5el', 'armv6el', 'armv7el' ], # not existing arch, just for compatibility + 'armv7hl': [ 'armv7hl' ], # not existing arch, just for compatibility + 'armv8el': [ 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el' ], # not existing arch, just for compatibility + 'armv8l': [ 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el' ], # not existing arch, just for compatibility + 'armv5tel': [ 'armv4l', 'armv5el', 'armv5tel' ], 's390x': ['s390' ], 'ppc64': [ 'ppc', 'ppc64', 'ppc64p7', 'ppc64le' ], - 'ppc64le':[ 'ppc64le' ], + 'ppc64le': [ 'ppc64le', 'ppc64' ], 'i586': [ 'i386' ], 'i686': [ 'i586', 'i386' ], 'x86_64': ['i686', 'i586', 'i386' ], @@ -100,7 +105,12 @@ def __init__(self, filename, apiurl, buildtype = 'spec', localpkgs = []): if root.find('error') != None: sys.stderr.write('buildinfo is broken... 
it says:\n') error = root.find('error').text - sys.stderr.write(error + '\n') + if error.startswith('unresolvable: '): + sys.stderr.write('unresolvable: ') + sys.stderr.write('\n '.join(error[14:].split(','))) + else: + sys.stderr.write(error) + sys.stderr.write('\n') sys.exit(1) if not (apiurl.startswith('https://') or apiurl.startswith('http://')): @@ -112,10 +122,16 @@ def __init__(self, filename, apiurl, buildtype = 'spec', localpkgs = []): # are we building .rpm or .deb? # XXX: shouldn't we deliver the type via the buildinfo? self.pacsuffix = 'rpm' - if self.buildtype == 'dsc': + if self.buildtype == 'dsc' or self.buildtype == 'collax': self.pacsuffix = 'deb' if self.buildtype == 'arch': self.pacsuffix = 'arch' + if self.buildtype == 'livebuild': + self.pacsuffix = 'deb' + if self.buildtype == 'snapcraft': + # atm ubuntu is used as base, but we need to be more clever when + # snapcraft also supports rpm + self.pacsuffix = 'deb' self.buildarch = root.find('arch').text if root.find('hostarch') != None: @@ -155,6 +171,11 @@ def __init__(self, filename, apiurl, buildtype = 'spec', localpkgs = []): self.noinstall_list = [ dep.name for dep in self.deps if dep.noinstall ] self.installonly_list = [ dep.name for dep in self.deps if dep.installonly ] + if root.find('preinstallimage') != None: + self.preinstallimage = root.find('preinstallimage') + else: + self.preinstallimage = None + def has_dep(self, name): for i in self.deps: @@ -178,7 +199,7 @@ def __init__(self, node, buildarch, pacsuffix, apiurl, localpkgs = []): self.mp = {} for i in ['binary', 'package', - 'epoch', 'version', 'release', + 'epoch', 'version', 'release', 'hdrmd5', 'project', 'repository', 'preinstall', 'vminstall', 'noinstall', 'installonly', 'runscripts', 'sb2install', @@ -211,13 +232,15 @@ def __init__(self, node, buildarch, pacsuffix, apiurl, localpkgs = []): self.mp['apiurl'] = apiurl if pacsuffix == 'deb': - filename = debquery.DebQuery.filename(self.mp['name'], self.mp['epoch'], 
self.mp['version'], self.mp['release'], self.mp['arch']) + canonname = debquery.DebQuery.filename(self.mp['name'], self.mp['epoch'], self.mp['version'], self.mp['release'], self.mp['arch']) elif pacsuffix == 'arch': - filename = archquery.ArchQuery.filename(self.mp['name'], self.mp['epoch'], self.mp['version'], self.mp['release'], self.mp['arch']) + canonname = archquery.ArchQuery.filename(self.mp['name'], self.mp['epoch'], self.mp['version'], self.mp['release'], self.mp['arch']) else: - filename = rpmquery.RpmQuery.filename(self.mp['name'], self.mp['epoch'], self.mp['version'], self.mp['release'], self.mp['arch']) + canonname = rpmquery.RpmQuery.filename(self.mp['name'], self.mp['epoch'], self.mp['version'], self.mp['release'], self.mp['arch']) - self.mp['filename'] = node.get('binary') or filename + self.mp['canonname'] = canonname + # maybe we should rename filename key to binary + self.mp['filename'] = node.get('binary') or canonname if self.mp['repopackage'] == '_repository': self.mp['repofilename'] = self.mp['name'] else: @@ -238,7 +261,7 @@ def makeurls(self, cachedir, urllist): # or if-modified-since, so the caching is simply name-based (on the assumption # that the filename is suitable as identifier) self.localdir = '%s/%s/%s/%s' % (cachedir, self.project, self.repository, self.arch) - self.fullfilename = os.path.join(self.localdir, self.filename) + self.fullfilename = os.path.join(self.localdir, self.canonname) self.url_local = 'file://%s' % self.fullfilename # first, add the local URL @@ -255,33 +278,100 @@ def __repr__(self): return "%s" % self.name - -def get_built_files(pacdir, pactype): - if pactype == 'rpm': +def get_preinstall_image(apiurl, arch, cache_dir, img_info): + """ + Searches preinstall image according to build info and downloads it to cache. + Returns preinstall image path, source and list of image binaries, which can + be used to create rpmlist. + NOTE: preinstall image can be used only for new build roots! 
+ """ + imagefile = '' + imagesource = '' + img_bins = [] + for bin in img_info.findall('binary'): + img_bins.append(bin.text) + + img_project = img_info.get('project') + img_repository = img_info.get('repository') + img_arch = arch + img_pkg = img_info.get('package') + img_file = img_info.get('filename') + img_hdrmd5 = img_info.get('hdrmd5') + if not img_hdrmd5: + img_hdrmd5 = img_file + cache_path = '%s/%s/%s/%s' % (cache_dir, img_project, img_repository, img_arch) + ifile_path = '%s/%s' % (cache_path, img_file) + ifile_path_part = '%s.part' % ifile_path + + imagefile = ifile_path + imagesource = "%s/%s/%s [%s]" % (img_project, img_repository, img_pkg, img_hdrmd5) + + if not os.path.exists(ifile_path): + url = "%s/build/%s/%s/%s/%s/%s" % (apiurl, img_project, img_repository, img_arch, img_pkg, img_file) + print("downloading preinstall image %s" % imagesource) + if not os.path.exists(cache_path): + try: + os.makedirs(cache_path, mode=0o755) + except OSError as e: + print('packagecachedir is not writable for you?', file=sys.stderr) + print(e, file=sys.stderr) + sys.exit(1) + if sys.stdout.isatty() and TextMeter: + progress_obj = TextMeter(fo=sys.stdout) + else: + progress_obj = None + gr = OscFileGrabber(progress_obj=progress_obj) + try: + gr.urlgrab(url, filename=ifile_path_part, text='fetching image') + except URLGrabError as e: + print("Failed to download! 
ecode:%i errno:%i" % (e.code, e.errno)) + return ('', '', []) + # download ok, rename partial file to final file name + os.rename(ifile_path_part, ifile_path) + return (imagefile, imagesource, img_bins) + +def get_built_files(pacdir, buildtype): + if buildtype == 'spec': b_built = subprocess.Popen(['find', os.path.join(pacdir, 'RPMS'), '-name', '*.rpm'], stdout=subprocess.PIPE).stdout.read().strip() s_built = subprocess.Popen(['find', os.path.join(pacdir, 'SRPMS'), '-name', '*.rpm'], stdout=subprocess.PIPE).stdout.read().strip() - elif pactype == 'kiwi': + elif buildtype == 'kiwi': b_built = subprocess.Popen(['find', os.path.join(pacdir, 'KIWI'), '-type', 'f'], stdout=subprocess.PIPE).stdout.read().strip() - elif pactype == 'deb': + s_built = '' + elif buildtype == 'dsc' or buildtype == 'collax': b_built = subprocess.Popen(['find', os.path.join(pacdir, 'DEBS'), '-name', '*.deb'], stdout=subprocess.PIPE).stdout.read().strip() s_built = subprocess.Popen(['find', os.path.join(pacdir, 'SOURCES.DEB'), '-type', 'f'], stdout=subprocess.PIPE).stdout.read().strip() - elif pactype == 'arch': + elif buildtype == 'arch': b_built = subprocess.Popen(['find', os.path.join(pacdir, 'ARCHPKGS'), '-name', '*.pkg.tar*'], stdout=subprocess.PIPE).stdout.read().strip() s_built = '' + elif buildtype == 'livebuild': + b_built = subprocess.Popen(['find', os.path.join(pacdir, 'OTHER'), + '-name', '*.iso*'], + stdout=subprocess.PIPE).stdout.read().strip() + s_built = '' + elif buildtype == 'snapcraft': + b_built = subprocess.Popen(['find', os.path.join(pacdir, 'OTHER'), + '-name', '*.snap'], + stdout=subprocess.PIPE).stdout.read().strip() + s_built = '' + elif buildtype == 'appimage': + b_built = subprocess.Popen(['find', os.path.join(pacdir, 'OTHER'), + '-name', '*.AppImage'], + stdout=subprocess.PIPE).stdout.read().strip() + s_built = '' else: - print('WARNING: Unknown package type \'%s\'.' % pactype, file=sys.stderr) + print('WARNING: Unknown package type \'%s\'.' 
% buildtype, file=sys.stderr) b_built = '' s_built = '' return s_built, b_built @@ -311,14 +401,14 @@ def get_repo(path): return repositoryDirectory -def get_prefer_pkgs(dirs, wanted_arch, type): +def get_prefer_pkgs(dirs, wanted_arch, type, cpio): import glob - from .util import repodata, packagequery, cpio + from .util import repodata, packagequery paths = [] repositories = [] suffix = '*.rpm' - if type == 'dsc': + if type == 'dsc' or type == 'collax' or type == 'livebuild': suffix = '*.deb' elif type == 'arch': suffix = '*.pkg.tar.xz' @@ -340,9 +430,9 @@ def get_prefer_pkgs(dirs, wanted_arch, type): packageQueries.add(packageQuery) for path in paths: - if path.endswith('src.rpm'): + if path.endswith('.src.rpm') or path.endswith('.nosrc.rpm'): continue - if path.find('-debuginfo-') > 0: + if path.endswith('.patch.rpm') or path.endswith('.delta.rpm'): continue packageQuery = packagequery.PackageQuery.query(path) packageQueries.add(packageQuery) @@ -351,21 +441,27 @@ def get_prefer_pkgs(dirs, wanted_arch, type): for name, packageQuery in packageQueries.items()) depfile = create_deps(packageQueries.values()) - cpio = cpio.CpioWrite() cpio.add('deps', '\n'.join(depfile)) - return prefer_pkgs, cpio + return prefer_pkgs def create_deps(pkgqs): """ - creates a list of requires/provides which corresponds to build's internal + creates a list of dependencies which corresponds to build's internal dependency file format """ depfile = [] for p in pkgqs: id = '%s.%s-0/0/0: ' % (p.name(), p.arch()) - depfile.append('R:%s%s' % (id, ' '.join(p.requires()))) depfile.append('P:%s%s' % (id, ' '.join(p.provides()))) + depfile.append('R:%s%s' % (id, ' '.join(p.requires()))) + d = p.conflicts() + if d: + depfile.append('C:%s%s' % (id, ' '.join(d))) + d = p.obsoletes() + if d: + depfile.append('O:%s%s' % (id, ' '.join(d))) + depfile.append('I:%s%s-%s 0-%s' % (id, p.name(), p.evr(), p.arch())) return depfile @@ -381,7 +477,7 @@ def check_trusted_projects(apiurl, projects): if not prj in 
trusted: print("\nThe build root needs packages from project '%s'." % prj) print("Note that malicious packages can compromise the build result or even your system.") - r = raw_input(trustprompt % { 'project':prj }) + r = raw_input(trustprompt % { 'project': prj }) if r == '1': print("adding '%s' to ~/.oscrc: ['%s']['trusted_prj']" % (prj, apiurl)) trusted.append(prj) @@ -402,15 +498,23 @@ def main(apiurl, opts, argv): build_root = None cache_dir = None build_uid = '' + vm_memory = config['build-memory'] vm_type = config['build-type'] + vm_telnet = None build_descr = os.path.abspath(build_descr) build_type = os.path.splitext(build_descr)[1][1:] if os.path.basename(build_descr) == 'PKGBUILD': build_type = 'arch' - if build_type not in ['spec', 'dsc', 'kiwi', 'arch']: + if os.path.basename(build_descr) == 'build.collax': + build_type = 'collax' + if os.path.basename(build_descr) == 'appimage.yml': + build_type = 'appimage' + if os.path.basename(build_descr) == 'snapcraft.yaml': + build_type = 'snapcraft' + if build_type not in ['spec', 'dsc', 'kiwi', 'arch', 'collax', 'livebuild', 'snapcraft', 'appimage']: raise oscerr.WrongArgs( - 'Unknown build type: \'%s\'. Build description should end in .spec, .dsc or .kiwi.' \ + 'Unknown build type: \'%s\'. Build description should end in .spec, .dsc, .kiwi, or .livebuild. Or being named PKGBUILD, build.collax, appimage.yml or snapcraft.yaml' \ % build_type) if not os.path.isfile(build_descr): raise oscerr.WrongArgs('Error: build description file named \'%s\' does not exist.' 
% build_descr) @@ -436,6 +540,8 @@ def main(apiurl, opts, argv): build_root = opts.root if opts.target: buildargs.append('--target=%s' % opts.target) + if opts.threads: + buildargs.append('--threads=%s' % opts.threads) if opts.jobs: buildargs.append('--jobs=%s' % opts.jobs) elif config['build-jobs'] > 1: @@ -481,8 +587,12 @@ def main(apiurl, opts, argv): else: print('Error: build-uid arg must be 2 colon separated numerics: "uid:gid" or "caller"', file=sys.stderr) return 1 + if opts.vm_memory: + vm_memory = opts.vm_memory if opts.vm_type: vm_type = opts.vm_type + if opts.vm_telnet: + vm_telnet = opts.vm_telnet if opts.alternative_project: prj = opts.alternative_project pac = '_repository' @@ -495,6 +605,7 @@ def main(apiurl, opts, argv): if opts.shell: buildargs.append("--shell") + orig_build_root = config['build-root'] # make it possible to override configuration of the rc file for var in ['OSC_PACKAGECACHEDIR', 'OSC_SU_WRAPPER', 'OSC_BUILD_ROOT']: val = os.getenv(var) @@ -513,14 +624,18 @@ def main(apiurl, opts, argv): except oscerr.NoWorkingCopy: opts.local_package = True if opts.local_package: - pacname = os.path.splitext(build_descr)[0] + pacname = os.path.splitext(os.path.basename(build_descr))[0] apihost = urlsplit(apiurl)[1] if not build_root: + build_root = config['build-root'] + if build_root == orig_build_root: + # ENV var was not set + build_root = config['api_host_options'][apiurl].get('build-root', build_root) try: - build_root = config['build-root'] % {'repo': repo, 'arch': arch, + build_root = build_root % {'repo': repo, 'arch': arch, 'project': prj, 'package': pacname, 'apihost': apihost} except: - build_root = config['build-root'] + pass cache_dir = config['packagecachedir'] % {'apihost': apihost} @@ -553,11 +668,41 @@ def main(apiurl, opts, argv): s += "%%define %s\n" % i build_descr_data = s + build_descr_data + cpiodata = None + servicefile = os.path.join(os.path.dirname(build_descr), "_service") + if not os.path.isfile(servicefile): + 
servicefile = os.path.join(os.path.dirname(build_descr), "_service") + if not os.path.isfile(servicefile): + servicefile = None + else: + print('Using local _service file') + buildenvfile = os.path.join(os.path.dirname(build_descr), "_buildenv." + repo + "." + arch) + if not os.path.isfile(buildenvfile): + buildenvfile = os.path.join(os.path.dirname(build_descr), "_buildenv") + if not os.path.isfile(buildenvfile): + buildenvfile = None + else: + print('Using local buildenv file: %s' % os.path.basename(buildenvfile)) + if buildenvfile or servicefile: + from .util import cpio + if not cpiodata: + cpiodata = cpio.CpioWrite() + if opts.prefer_pkgs: print('Scanning the following dirs for local packages: %s' % ', '.join(opts.prefer_pkgs)) - prefer_pkgs, cpio = get_prefer_pkgs(opts.prefer_pkgs, arch, build_type) - cpio.add(os.path.basename(build_descr), build_descr_data) - build_descr_data = cpio.get() + from .util import cpio + if not cpiodata: + cpiodata = cpio.CpioWrite() + prefer_pkgs = get_prefer_pkgs(opts.prefer_pkgs, arch, build_type, cpiodata) + + if cpiodata: + cpiodata.add(os.path.basename(build_descr), build_descr_data) + # buildenv must come last for compatibility reasons... + if buildenvfile: + cpiodata.add("buildenv", open(buildenvfile).read()) + if servicefile: + cpiodata.add("_service", open(servicefile).read()) + build_descr_data = cpiodata.get() # special handling for overlay and rsync-src/dest specialcmdopts = [] @@ -636,8 +781,8 @@ def main(apiurl, opts, argv): pkg_meta_e = None try: # take care, not to run into double trouble. 
- pkg_meta_e = meta_exists(metatype='pkg', path_args=(quote_plus(prj), - quote_plus(pac)), template_args=None, create_new=False, + pkg_meta_e = meta_exists(metatype='pkg', path_args=(quote_plus(prj), + quote_plus(pac)), template_args=None, create_new=False, apiurl=apiurl) except: pass @@ -645,10 +790,10 @@ def main(apiurl, opts, argv): if pkg_meta_e: print('ERROR: Either wrong repo/arch as parameter or a parse error of .spec/.dsc/.kiwi file due to syntax error', file=sys.stderr) else: - print('The package \'%s\' does not exists - please ' \ + print('The package \'%s\' does not exist - please ' \ 'rerun with \'--local-package\'' % pac, file=sys.stderr) else: - print('The project \'%s\' does not exists - please ' \ + print('The project \'%s\' does not exist - please ' \ 'rerun with \'--alternative-project \'' % prj, file=sys.stderr) sys.exit(1) else: @@ -720,8 +865,24 @@ def main(apiurl, opts, argv): enable_cpio = not opts.disable_cpio_bulk_download, cookiejar=cookiejar) - # implicitly trust the project we are building for - check_trusted_projects(apiurl, [ i for i in bi.projects.keys() if not i == prj ]) + if not opts.trust_all_projects: + # implicitly trust the project we are building for + check_trusted_projects(apiurl, [ i for i in bi.projects.keys() if not i == prj ]) + + imagefile = '' + imagesource = '' + imagebins = [] + if (not config['no_preinstallimage'] and not opts.nopreinstallimage and + bi.preinstallimage and + not opts.noinit and not opts.offline and + (opts.clean or (not os.path.exists(build_root + "/installed-pkg") and + not os.path.exists(build_root + "/.build/init_buildsystem.data")))): + (imagefile, imagesource, imagebins) = get_preinstall_image(apiurl, arch, cache_dir, bi.preinstallimage) + if imagefile: + # remove binaries from build deps which are included in preinstall image + for i in bi.deps: + if i.name in imagebins: + bi.remove_dep(i.name) # now update the package cache fetcher.run(bi) @@ -760,8 +921,9 @@ class mytmpdir: """ temporary 
directory that removes itself""" def __init__(self, *args, **kwargs): self.name = mkdtemp(*args, **kwargs) + _rmtree = staticmethod(shutil.rmtree) def cleanup(self): - shutil.rmtree(self.name) + self._rmtree(self.name) def __del__(self): self.cleanup() def __exit__(self): @@ -862,7 +1024,7 @@ def __str__(self): buildargs.append('--kiwi-parameter') buildargs.append('--add-repo') buildargs.append('--kiwi-parameter') - buildargs.append("repos/"+path) + buildargs.append("dir://./repos/"+path) buildargs.append('--kiwi-parameter') buildargs.append('--add-repotype') buildargs.append('--kiwi-parameter') @@ -875,12 +1037,12 @@ def __str__(self): if not m: # short path without obs instance name m = re.match(r"obs://([^/]+)/(.+)", xml.find('source').get('path')) - project=m.group(1).replace(":",":/") + project=m.group(1).replace(":", ":/") repo=m.group(2) buildargs.append('--kiwi-parameter') buildargs.append('--add-repo') buildargs.append('--kiwi-parameter') - buildargs.append("repos/"+project+"/"+repo) + buildargs.append("dir://./repos/"+project+"/"+repo) buildargs.append('--kiwi-parameter') buildargs.append('--add-repotype') buildargs.append('--kiwi-parameter') @@ -905,14 +1067,32 @@ def __str__(self): else: print('WARNING: unknown packages get not verified, they can compromise your system !') + for i in bi.deps: + if i.hdrmd5: + from .util import packagequery + hdrmd5 = packagequery.PackageQuery.queryhdrmd5(i.fullfilename) + if not hdrmd5: + print("Error: cannot get hdrmd5 for %s" % i.fullfilename) + sys.exit(1) + if hdrmd5 != i.hdrmd5: + print("Error: hdrmd5 mismatch for %s: %s != %s" % (i.fullfilename, hdrmd5, i.hdrmd5)) + sys.exit(1) + print('Writing build configuration') if build_type == 'kiwi': rpmlist = [ '%s %s\n' % (i.name, i.fullfilename) for i in bi.deps if not i.noinstall ] else: rpmlist = [ '%s %s\n' % (i.name, i.fullfilename) for i in bi.deps ] + for i in imagebins: + rpmlist.append('%s preinstallimage\n' % i) rpmlist += [ '%s %s\n' % (i[0], i[1]) for i in 
rpmlist_prefers ] + if imagefile: + rpmlist.append('preinstallimage: %s\n' % imagefile) + if imagesource: + rpmlist.append('preinstallimagesource: %s\n' % imagesource) + rpmlist.append('preinstall: ' + ' '.join(bi.preinstall_list) + '\n') rpmlist.append('vminstall: ' + ' '.join(bi.vminstall_list) + '\n') rpmlist.append('sb2install: ' + ' '.join(bi.sb2install_list) + '\n') @@ -947,7 +1127,11 @@ def __str__(self): my_build_swap = build_root + '/swap' vm_options = [ '--vm-type=%s' % vm_type ] - if vm_type != 'lxc' and vm_type != 'emulator': + if vm_telnet: + vm_options += [ '--vm-telnet=' + vm_telnet ] + if vm_memory: + vm_options += [ '--memory=' + vm_memory ] + if vm_type != 'lxc': vm_options += [ '--vm-disk=' + my_build_device ] vm_options += [ '--vm-swap=' + my_build_swap ] vm_options += [ '--logfile=%s/.build.log' % build_root ] @@ -955,16 +1139,21 @@ def __str__(self): if os.access(build_root, os.W_OK) and os.access('/dev/kvm', os.W_OK): # so let's hope there's also an fstab entry need_root = False + if config['build-kernel']: + vm_options += [ '--vm-kernel=' + config['build-kernel'] ] + if config['build-initrd']: + vm_options += [ '--vm-initrd=' + config['build-initrd'] ] + build_root += '/.mount' - if config['build-memory']: - vm_options += [ '--memory=' + config['build-memory'] ] if config['build-vmdisk-rootsize']: vm_options += [ '--vmdisk-rootsize=' + config['build-vmdisk-rootsize'] ] if config['build-vmdisk-swapsize']: vm_options += [ '--vmdisk-swapsize=' + config['build-vmdisk-swapsize'] ] if config['build-vmdisk-filesystem']: vm_options += [ '--vmdisk-filesystem=' + config['build-vmdisk-filesystem'] ] + if config['build-vm-user']: + vm_options += [ '--vm-user=' + config['build-vm-user'] ] if opts.preload: @@ -1010,7 +1199,7 @@ def __str__(self): pacdir = os.path.join(build_root, pacdir) if os.path.exists(pacdir): - (s_built, b_built) = get_built_files(pacdir, bi.pacsuffix) + (s_built, b_built) = get_built_files(pacdir, bi.buildtype) print() if s_built: 
print(s_built) diff --git a/osc/checker.py b/osc/checker.py index 87d621b..f80a0da 100644 --- a/osc/checker.py +++ b/osc/checker.py @@ -90,7 +90,7 @@ def readkey(self, file): def check(self, pkg): # avoid errors on non rpm - if pkg[-4:] != '.rpm': + if pkg[-4:] != '.rpm': return fd = None try: diff --git a/osc/cmdln.py b/osc/cmdln.py index 8fa2cc0..da49ce6 100644 --- a/osc/cmdln.py +++ b/osc/cmdln.py @@ -393,47 +393,63 @@ def cmdloop(self, intro=None): """ self.cmdlooping = True self.preloop() - if intro is None: - intro = self.intro - if intro: - intro_str = self._str(intro) - self.stdout.write(intro_str+'\n') - self.stop = False - retval = None - while not self.stop: - if self.cmdqueue: - argv = self.cmdqueue.pop(0) - assert isinstance(argv, (list, tuple)), \ - "item on 'cmdqueue' is not a sequence: %r" % argv - else: - if self.use_rawinput: - try: - try: - #python 2.x - line = raw_input(self._prompt_str) - except NameError: - line = input(self._prompt_str) - except EOFError: - line = 'EOF' + if self.use_rawinput and self.completekey: + try: + import readline + self.old_completer = readline.get_completer() + readline.set_completer(self.complete) + readline.parse_and_bind(self.completekey+": complete") + except ImportError: + pass + try: + if intro is None: + intro = self.intro + if intro: + intro_str = self._str(intro) + self.stdout.write(intro_str+'\n') + self.stop = False + retval = None + while not self.stop: + if self.cmdqueue: + argv = self.cmdqueue.pop(0) + assert isinstance(argv, (list, tuple)), \ + "item on 'cmdqueue' is not a sequence: %r" % argv else: - self.stdout.write(self._prompt_str) - self.stdout.flush() - line = self.stdin.readline() - if not len(line): - line = 'EOF' + if self.use_rawinput: + try: + try: + #python 2.x + line = raw_input(self._prompt_str) + except NameError: + line = input(self._prompt_str) + except EOFError: + line = 'EOF' else: - line = line[:-1] # chop '\n' - argv = line2argv(line) - try: - argv = self.precmd(argv) - retval = 
self.onecmd(argv) - self.postcmd(argv) - except: - if not self.cmdexc(argv): - raise - retval = 1 - self.lastretval = retval - self.postloop() + self.stdout.write(self._prompt_str) + self.stdout.flush() + line = self.stdin.readline() + if not len(line): + line = 'EOF' + else: + line = line[:-1] # chop '\n' + argv = line2argv(line) + try: + argv = self.precmd(argv) + retval = self.onecmd(argv) + self.postcmd(argv) + except: + if not self.cmdexc(argv): + raise + retval = 1 + self.lastretval = retval + self.postloop() + finally: + if self.use_rawinput and self.completekey: + try: + import readline + readline.set_completer(self.old_completer) + except ImportError: + pass self.cmdlooping = False return retval @@ -516,7 +532,7 @@ def parseline(self, line): elif line[0] == '?': line = 'help ' + line[1:] i, n = 0, len(line) - while i < n and line[i] in self.identchars: + while i < n and line[i] in self.identchars: i = i+1 cmd, arg = line[:i], line[i:].strip() return cmd, arg, line @@ -574,7 +590,7 @@ def do_help(self, argv): doc = self.__class__.__doc__ # try class docstring if doc is None: # Try to provide some reasonable useful default help. - if self.cmdlooping: + if self.cmdlooping: prefix = "" else: prefix = self.name+' ' @@ -739,7 +755,7 @@ def _help_get_command_list(self): token2canonical = self._get_canonical_map() aliases = {} for token, cmdname in token2canonical.items(): - if token == cmdname: + if token == cmdname: continue aliases.setdefault(cmdname, []).append(token) @@ -803,7 +819,7 @@ def _help_preprocess_help_list(self, help, cmdname=None): helpnames = {} token2cmdname = self._get_canonical_map() for attr in self.get_names(): - if not attr.startswith("help_"): + if not attr.startswith("help_"): continue helpname = attr[5:] if helpname not in token2cmdname: @@ -854,9 +870,9 @@ def _help_preprocess_cmd_usage(self, help, cmdname=None): # Adjust argcount for possible *args and **kwargs arguments. 
argcount = co_argcount - if co_flags & CO_FLAGS_ARGS: + if co_flags & CO_FLAGS_ARGS: argcount += 1 - if co_flags & CO_FLAGS_KWARGS: + if co_flags & CO_FLAGS_KWARGS: argcount += 1 # Determine the usage string. @@ -937,9 +953,9 @@ def _get_canonical_map(self): token2canonical = {} cmd2funcname = {} # use a dict to strip duplicates for attr in self.get_names(): - if attr.startswith("do_"): + if attr.startswith("do_"): cmdname = attr[3:] - elif attr.startswith("_do_"): + elif attr.startswith("_do_"): cmdname = attr[4:] else: continue @@ -1263,7 +1279,7 @@ def _format_linedata(linedata, indent, indent_width): SPACING = 3 MAX_NAME_WIDTH = 15 - NAME_WIDTH = min(max([len(s) for s,d in linedata]), MAX_NAME_WIDTH) + NAME_WIDTH = min(max([len(s) for s, d in linedata]), MAX_NAME_WIDTH) DOC_WIDTH = WIDTH - NAME_WIDTH - SPACING for namestr, doc in linedata: line = indent + namestr @@ -1371,12 +1387,12 @@ def line2argv(line): i = -1 while True: i += 1 - if i >= len(line): + if i >= len(line): break ch = line[i] if ch == "\\": # escaped char always added to arg, regardless of state - if arg is None: + if arg is None: arg = "" i += 1 arg += line[i] @@ -1394,11 +1410,11 @@ def line2argv(line): arg += ch elif state == "default": if ch == '"': - if arg is None: + if arg is None: arg = "" state = "double-quoted" elif ch == "'": - if arg is None: + if arg is None: arg = "" state = "single-quoted" elif ch in string.whitespace: @@ -1406,7 +1422,7 @@ def line2argv(line): argv.append(arg) arg = None else: - if arg is None: + if arg is None: arg = "" arg += ch if arg is not None: @@ -1485,7 +1501,7 @@ def _dedentlines(lines, tabsize=8, skip_first_line=False): break else: continue # skip all-whitespace lines - if DEBUG: + if DEBUG: print("dedent: indent=%d: %r" % (indent, line)) if margin is None: margin = indent @@ -1496,7 +1512,7 @@ def _dedentlines(lines, tabsize=8, skip_first_line=False): if margin is not None and margin > 0: for i, line in enumerate(lines): - if i == 0 and 
skip_first_line: + if i == 0 and skip_first_line: continue removed = 0 for j, ch in enumerate(line): @@ -1505,7 +1521,7 @@ def _dedentlines(lines, tabsize=8, skip_first_line=False): elif ch == '\t': removed += tabsize - (removed % tabsize) elif ch in '\r\n': - if DEBUG: + if DEBUG: print("dedent: %r: EOL -> strip up to EOL" % line) lines[i] = lines[i][j:] break diff --git a/osc/commandline.py b/osc/commandline.py index d48cecd..561112f 100644 --- a/osc/commandline.py +++ b/osc/commandline.py @@ -47,7 +47,7 @@ * http://en.opensuse.org/openSUSE:Build_Service_Tutorial * http://en.opensuse.org/openSUSE:OSC .PP -You can modify osc commands, or roll you own, via the plugin API: +You can modify osc commands, or roll your own, via the plugin API: * http://en.opensuse.org/openSUSE:OSC_plugins .SH AUTHOR osc was written by several authors. This man page is automatically generated. @@ -67,7 +67,7 @@ class Osc(cmdln.Cmdln): * http://en.opensuse.org/openSUSE:Build_Service_Tutorial * http://en.opensuse.org/openSUSE:OSC - You can modify osc commands, or roll you own, via the plugin API: + You can modify osc commands, or roll your own, via the plugin API: * http://en.opensuse.org/openSUSE:OSC_plugins """ name = 'osc' @@ -150,7 +150,7 @@ def postoptparse(self, try_again = True): conf.write_initial_config(e.file, config) print('done', file=sys.stderr) - if try_again: + if try_again: self.postoptparse(try_again = False) except oscerr.ConfigMissingApiurl as e: print(e.msg, file=sys.stderr) @@ -158,7 +158,7 @@ def postoptparse(self, try_again = True): user = raw_input('Username: ') passwd = getpass.getpass() conf.add_section(e.file, e.url, user, passwd) - if try_again: + if try_again: self.postoptparse(try_again = False) self.options.verbose = conf.config['verbose'] @@ -182,7 +182,7 @@ def get_api_url(self): ## check for Stale NFS file handle: '.' 
try: os.stat('.') - except Exception as ee: + except Exception as ee: e = ee print("os.getcwd() failed: ", e, file=sys.stderr) sys.exit(1) @@ -304,7 +304,7 @@ def do_list(self, subcmd, opts, *args): pass if len(args) > 0: project = args[0] - if project == '/': + if project == '/': project = None if project == '.': cwd = os.getcwd() @@ -340,7 +340,7 @@ def do_list(self, subcmd, opts, *args): if opts.binaries and opts.expand: raise oscerr.WrongOptions('Sorry, --binaries and --expand are mutual exclusive.') - apiurl = self.get_api_url() + apiurl = self.get_api_url() # list binaries if opts.binaries: @@ -397,9 +397,7 @@ def do_list(self, subcmd, opts, *args): if opts.verbose: if self.options.verbose: print('Sorry, the --verbose option is not implemented for projects.', file=sys.stderr) - if opts.expand: - raise oscerr.WrongOptions('Sorry, the --expand option is not implemented for projects.') - for pkg in meta_get_packagelist(apiurl, project, opts.deleted): + for pkg in meta_get_packagelist(apiurl, project, deleted = opts.deleted, expand = opts.expand): print(pkg) elif len(args) == 2 or len(args) == 3: @@ -428,7 +426,7 @@ def do_list(self, subcmd, opts, *args): print_not_found = False else: print('\n'.join(l)) - if opts.expand or opts.unexpand or not link_seen: + if opts.expand or opts.unexpand or not link_seen: break m = show_files_meta(apiurl, project, package) li = Linkinfo() @@ -443,7 +441,96 @@ def do_list(self, subcmd, opts, *args): opts.expand = True if fname and print_not_found: print('file \'%s\' does not exist' % fname) + return 1 + + + @cmdln.option('-s', '--skip-disabled', action='store_true', + help='Skip disabled channels. Otherwise the source gets added, but not the repositories.') + @cmdln.option('-e', '--enable-all', action='store_true', + help='Enable all added channels including the ones disabled by default.') + def do_addchannels(self, subcmd, opts, *args): + """${cmd_name}: Add channels to project. 
+ + The command adds all channels which are defined to be used for a given source package. + The source link target is used to lookup the channels. The command can be + used for a certain package or for all in the specified project. + + In case no channel is defined the operation is just returning. + + Examples: + osc addchannels [PROJECT [PACKAGE]] + ${cmd_option_list} + """ + + apiurl = self.get_api_url() + localdir = os.getcwd() + channel = None + if not args: + if is_project_dir(localdir) or is_package_dir(localdir): + project = store_read_project(localdir) + elif is_package_dir(localdir): + project = store_read_project(localdir) + channel = store_read_package(localdir) + else: + raise oscerr.WrongArgs('Either specify project [package] or call it from a project/package working copy') + else: + project = args[0] + + query = {'cmd': 'addchannels'} + if opts.enable_all and opts.skip_disabled: + raise oscerr.WrongOptions('--enable-all and --skip-disabled options are mutually exclusive') + elif opts.enable_all: + query['mode'] = 'enable_all' + elif opts.skip_disabled: + query['mode'] = 'skip_disabled' + + print("Looking for channels...") + url = makeurl(apiurl, ['source', project], query=query) + if channel: + url = makeurl(apiurl, ['source', project, channel], query=query) + f = http_POST(url) + + @cmdln.alias('enablechannel') + def do_enablechannels(self, subcmd, opts, *args): + """${cmd_name}: Enables channels + + Enables existing channel packages in a project. Enabling means adding the + needed repositories for building. + The command can be used to enable a specific one or all channels of a project. 
+ + Examples: + osc enablechannels [PROJECT [CHANNEL_PACKAGE]] + ${cmd_option_list} + """ + + apiurl = self.get_api_url() + localdir = os.getcwd() + channel = None + if not args: + if is_project_dir(localdir): + project = store_read_project(localdir) + elif is_package_dir(localdir): + project = store_read_project(localdir) + channel = store_read_package(localdir) + else: + raise oscerr.WrongArgs('Either specify project [package] or call it from a project/package working copy') + else: + project = args[0] + if len(args) > 1: + channel = args[1] + + query = {} + if channel: + query['cmd'] = 'enablechannel' + else: + query = {'cmd': 'modifychannels', 'mode': 'enable_all'} + + print("Enable channel(s)...") + url = makeurl(apiurl, ['source', project], query=query) + if channel: + url = makeurl(apiurl, ['source', project, channel], query=query) + f = http_POST(url) @cmdln.option('-f', '--force', action='store_true', help='force generation of new patchinfo file, do not update existing one.') @@ -461,7 +548,7 @@ def do_patchinfo(self, subcmd, opts, *args): ${cmd_option_list} """ - apiurl = self.get_api_url() + apiurl = self.get_api_url() project_dir = localdir = os.getcwd() patchinfo = 'patchinfo' if len(args) == 0: @@ -509,7 +596,7 @@ def do_patchinfo(self, subcmd, opts, *args): f = http_POST(url) # CAUTION: - # Both conf.config['checkout_no_colon'] and conf.config['checkout_rooted'] + # Both conf.config['checkout_no_colon'] and conf.config['checkout_rooted'] # fool this test: if is_package_dir(localdir): pac = Package(localdir) @@ -638,6 +725,8 @@ def do_token(self, subcmd, opts, *args): fd = urlopen(req, data=None) print(fd.read()) else: + if args and args[0] in ['create', 'delete', 'trigger']: + raise oscerr.WrongArgs("Did you mean --" + args[0] + "?") # just list token for data in streamfile(url, http_GET): sys.stdout.write(data) @@ -654,6 +743,10 @@ def do_token(self, subcmd, opts, *args): @cmdln.option('-F', '--file', metavar='FILE', help='read metadata from FILE, 
instead of opening an editor. ' '\'-\' denotes standard input. ') + @cmdln.option('-r', '--revision', metavar='REV', + help='checkout given revision instead of head revision. For prj and prjconf meta only') + @cmdln.option('-m', '--message', metavar='TEXT', + help='specify log message TEXT. For prj and prjconf meta only') @cmdln.option('-e', '--edit', action='store_true', help='edit metadata') @cmdln.option('-c', '--create', action='store_true', @@ -688,6 +781,8 @@ def do_meta(self, subcmd, opts, *args): the --file switch. If the argument is '-', input is taken from stdin: osc meta prjconf home:user | sed ... | osc meta prjconf home:user -F - + For meta prj and prjconf updates optional commit messages can be applied with --message. + When trying to edit a non-existing resource, it is created implicitly. @@ -695,13 +790,14 @@ def do_meta(self, subcmd, opts, *args): osc meta prj PRJ osc meta pkg PRJ PKG osc meta pkg PRJ PKG -e - osc meta attribute PRJ [PKG [SUBPACKAGE]] [--attribute ATTRIBUTE] [--create|--delete|--set [value_list]] Usage: - osc meta ARGS... - osc meta -e|--edit ARGS... - osc meta -F|--file ARGS... + osc meta [-r|--revision REV] ARGS... + osc meta ARGS... + osc meta [-m|--message TEXT] -e|--edit ARGS... + osc meta [-m|--message TEXT] -F|--file ARGS... 
osc meta pattern --delete PRJ PATTERN + osc meta attribute PRJ [PKG [SUBPACKAGE]] [--attribute ATTRIBUTE] [--create|--delete|--set [value_list]] ${cmd_option_list} """ @@ -787,17 +883,20 @@ def do_meta(self, subcmd, opts, *args): if opts.edit or opts.file: raise oscerr.WrongArgs('A pattern file argument is required.') + if cmd not in ['prj', 'prjconf'] and (opts.message or opts.revision): + raise oscerr.WrongOptions('options --revision and --message are only supported for the prj or prjconf subcommand') + # show if not opts.edit and not opts.file and not opts.delete and not opts.create and not opts.set: if cmd == 'prj': - sys.stdout.write(''.join(show_project_meta(apiurl, project))) + sys.stdout.write(''.join(show_project_meta(apiurl, project, opts.revision))) elif cmd == 'pkg': sys.stdout.write(''.join(show_package_meta(apiurl, project, package))) elif cmd == 'attribute': - sys.stdout.write(''.join(show_attribute_meta(apiurl, project, package, subpackage, + sys.stdout.write(''.join(show_attribute_meta(apiurl, project, package, subpackage, opts.attribute, opts.attribute_defaults, opts.attribute_project))) elif cmd == 'prjconf': - sys.stdout.write(''.join(show_project_conf(apiurl, project))) + sys.stdout.write(''.join(show_project_conf(apiurl, project, opts.revision))) elif cmd == 'user': r = get_user_meta(apiurl, user) if r: @@ -821,6 +920,7 @@ def do_meta(self, subcmd, opts, *args): remove_linking_repositories=opts.remove_linking_repositories, path_args=quote_plus(project), apiurl=apiurl, + msg = opts.message, template_args=({ 'name': project, 'user': conf.get_apiurl_usr(apiurl)})) @@ -837,6 +937,7 @@ def do_meta(self, subcmd, opts, *args): edit=True, path_args=quote_plus(project), apiurl=apiurl, + msg = opts.message, template_args=None) elif cmd == 'user': edit_meta(metatype='user', @@ -886,6 +987,7 @@ def do_meta(self, subcmd, opts, *args): force=opts.force, remove_linking_repositories=opts.remove_linking_repositories, apiurl=apiurl, + msg = opts.message, 
path_args=quote_plus(project)) elif cmd == 'pkg': edit_meta(metatype='pkg', @@ -898,6 +1000,7 @@ def do_meta(self, subcmd, opts, *args): data=f, edit=opts.edit, apiurl=apiurl, + msg = opts.message, path_args=quote_plus(project)) elif cmd == 'user': edit_meta(metatype='user', @@ -942,14 +1045,18 @@ def do_meta(self, subcmd, opts, *args): @cmdln.option('--nodevelproject', action='store_true', help='do not follow a defined devel project ' \ '(primary project where a package is developed)') + @cmdln.option('--separate-requests', action='store_true', + help='Create multiple requests instead of a single one (when command is used for entire project)') @cmdln.option('--seperate-requests', action='store_true', - help='Create multiple request instead of a single one (when command is used for entire project)') + help='Deprecated (wrong spelling - see --separate-requests)') @cmdln.option('--cleanup', action='store_true', help='remove package if submission gets accepted (default for home::branch projects)') @cmdln.option('--no-cleanup', action='store_true', help='never remove source package on accept, but update its content') @cmdln.option('--no-update', action='store_true', help='never touch source package on accept (will break source links)') + @cmdln.option('--update-link', action='store_true', + help='This transfers the source including the _link file.') @cmdln.option('-d', '--diff', action='store_true', help='show diff only instead of creating the actual request') @cmdln.option('--yes', action='store_true', @@ -974,9 +1081,27 @@ def do_submitrequest(self, subcmd, opts, *args): ${cmd_option_list} """ + def _check_service(root): + serviceinfo = root.find('serviceinfo') + if serviceinfo is not None: + # code "running" is ok, because the api will choke when trying + # to create the sr (if it is still running) + if serviceinfo.get('code') not in ('running', 'succeeded'): + print('A service run for package %s %s:' + % (root.get('name'), serviceinfo.get('code')), + 
file=sys.stderr) + error = serviceinfo.find('error') + if error is not None: + print('\n'.join(error.text.split('\\n'))) + sys.exit('\nPlease fix this first') + if opts.cleanup and opts.no_cleanup: raise oscerr.WrongOptions('\'--cleanup\' and \'--no-cleanup\' are mutually exclusive') + if opts.seperate_requests: + # compatibility option will be removed in the future + print('--seperate-requests is deprecated (use ' + '--separate-requests)', file=sys.stderr) src_update = conf.config['submitrequest_on_accept_action'] or None # we should check here for home::branch and default to update, but that would require OBS 1.7 server @@ -1028,9 +1153,12 @@ def do_submitrequest(self, subcmd, opts, *args): sr_ids = [] # for single request actionxml = "" - options_block = "" + options_block = "" if src_update: - options_block = """%s """ % (src_update) + options_block += """%s""" % (src_update) + if opts.update_link: + options_block += """true """ + options_block += "" # loop via all packages for checking their state for p in meta_get_packagelist(apiurl, project): @@ -1065,17 +1193,11 @@ print("Skipping package ", p, " since it is a source link pointing inside the project.") continue - serviceinfo = root.find('serviceinfo') - if serviceinfo != None: - if serviceinfo.get('code') != "succeeded": - print("Package ", p, " has a ", serviceinfo.get('code'), " source service") - sys.exit("Please fix this first") - if serviceinfo.get('error'): - print("Package ", p, " contains a failed source service.") - sys.exit("Please fix this first") + # check for failed source service + _check_service(root) # submitting this package - if opts.seperate_requests: + if opts.separate_requests or opts.seperate_requests: # create a single request result = create_submit_request(apiurl, project, p) if not result: @@ -1158,18 +1280,11 @@ raise oscerr.WrongArgs('Incorrect number of arguments.\n\n' \ + 
self.get_cmd_help('request')) - # check for running source service + # check for failed source service u = makeurl(apiurl, ['source', src_project, src_package]) f = http_GET(u) root = ET.parse(f).getroot() - serviceinfo = root.find('serviceinfo') - if serviceinfo != None: - if serviceinfo.get('code') != "succeeded": - print("Package ", src_package, " has a ", serviceinfo.get('code'), " source service") - sys.exit("Please fix this first") - if serviceinfo.get('error'): - print("Package ", src_package, " contains a failed source service.") - sys.exit("Please fix this first") + _check_service(root) if not opts.nodevelproject: devloc = None @@ -1202,7 +1317,7 @@ def do_submitrequest(self, subcmd, opts, *args): rev = root.get('rev') else: if linkinfo.get('project') != dst_project or linkinfo.get('package') != dst_package: - # the submit target is not link target. use merged md5sum references to + # the submit target is not link target. use merged md5sum references to # avoid not mergable sources when multiple request from same source get created. 
rev = root.get('srcmd5') @@ -1221,12 +1336,17 @@ def do_submitrequest(self, subcmd, opts, *args): return supersede_existing = False reqs = [] - if not opts.supersede: + if not opts.supersede and not opts.yes: (supersede_existing, reqs) = check_existing_requests(apiurl, src_project, src_package, dst_project, dst_package) + if not supersede_existing: + (supersede_existing, reqs) = check_existing_maintenance_requests(apiurl, + src_project, + [src_package], + dst_project, None) if not opts.message: difflines = [] doappend = False @@ -1244,7 +1364,10 @@ def do_submitrequest(self, subcmd, opts, *args): result = create_submit_request(apiurl, src_project, src_package, dst_project, dst_package, - opts.message, orev=rev, src_update=src_update) + opts.message, orev=rev, + src_update=src_update, dst_updatelink=opts.update_link) + print('created request id', result) + if supersede_existing: for req in reqs: change_request_state(apiurl, req.reqid, 'superseded', @@ -1254,8 +1377,6 @@ def do_submitrequest(self, subcmd, opts, *args): change_request_state(apiurl, opts.supersede, 'superseded', opts.message or '', result) - print('created request id', result) - def _actionparser(self, opt_str, value, parser): value = [] if not hasattr(parser.values, 'actiondata'): @@ -1288,7 +1409,6 @@ def _submit_request(self, args, opts, options_block): pi = [] pac = [] targetprojects = [] - rdiffmsg = [] # loop via all packages for checking their state for p in meta_get_packagelist(apiurl, project): if p.startswith("_patchinfo:"): @@ -1316,21 +1436,10 @@ def _submit_request(self, args, opts, options_block): if rdiff != '': targetprojects.append(t) pac.append(p) - rdiffmsg.append("old: %s/%s\nnew: %s/%s\n%s" % (t, p, project, p, rdiff)) else: print("Skipping package ", p, " since it has no difference with the target package.") else: print("Skipping package ", p, " since it is a source link pointing inside the project.") - if opts.diff: - print(''.join(rdiffmsg)) - sys.exit(0) - - if not opts.yes: - 
if pi: - print("Submitting patchinfo ", ', '.join(pi), " to ", ', '.join(targetprojects)) - print("\nEverything fine? Can we create the requests ? [y/n]") - if sys.stdin.read(1) != "y": - sys.exit("Aborted...") # loop via all packages to do the action for p in pac: @@ -1345,7 +1454,7 @@ def _submit_request(self, args, opts, options_block): (project, p, t, p, options_block) actionxml += s - return actionxml + return actionxml, [] elif len(args) <= 2: # try using the working copy at hand @@ -1399,45 +1508,29 @@ def _submit_request(self, args, opts, options_block): of the package %s primarily takes place. Please submit there instead, or use --nodevelproject to force direct submission.""" \ % (devloc, dst_package)) - if not opts.diff: - sys.exit(1) - - rdiff = None - if opts.diff: - try: - rdiff = 'old: %s/%s\nnew: %s/%s\n' % (dst_project, dst_package, src_project, src_package) - rdiff += server_diff(apiurl, - dst_project, dst_package, opts.revision, - src_project, src_package, None, True) - except: - rdiff = '' - if opts.diff: - run_pager(rdiff) - else: - reqs = get_request_list(apiurl, dst_project, dst_package, req_type='submit', req_state=['new','review']) - user = conf.get_apiurl_usr(apiurl) - myreqs = [ i for i in reqs if i.state.who == user ] - repl = 'y' - if len(myreqs) > 0 and not opts.yes: - print('You already created the following submit request: %s.' % \ - ', '.join([i.reqid for i in myreqs ])) - repl = raw_input('Supersede the old requests? 
(y/n/c) ') - if repl.lower() == 'c': - print('Aborting', file=sys.stderr) - sys.exit(1) + sys.exit(1) - actionxml = """ %s """ % \ - (src_project, src_package, opts.revision or show_upstream_rev(apiurl, src_project, src_package), dst_project, dst_package, options_block) - if repl.lower() == 'y': - for req in myreqs: - change_request_state(apiurl, req.reqid, 'superseded', - 'superseded by %s' % result, result) + reqs = get_request_list(apiurl, dst_project, dst_package, req_type='submit', req_state=['new', 'review']) + user = conf.get_apiurl_usr(apiurl) + myreqs = [ i for i in reqs if i.state.who == user and i.reqid != opts.supersede ] + repl = 'y' + if len(myreqs) > 0 and not opts.yes: + print('You already created the following submit request: %s.' % \ + ', '.join([i.reqid for i in myreqs ])) + repl = raw_input('Supersede the old requests? (y/n/c) ') + if repl.lower() == 'c': + print('Aborting', file=sys.stderr) + sys.exit(1) + elif repl.lower() != 'y': + myreqs = [] - if opts.supersede: - change_request_state(apiurl, opts.supersede, 'superseded', '', result) + actionxml = """ %s """ % \ + (src_project, src_package, opts.revision or show_upstream_rev(apiurl, src_project, src_package), dst_project, dst_package, options_block) + if opts.supersede: + myreqs.append(opts.supersede) - #print 'created request id', result - return actionxml + #print 'created request id', result + return actionxml, myreqs def _delete_request(self, args, opts): if len(args) < 1: @@ -1563,7 +1656,7 @@ def _set_bugowner(self, args, opts): package = """package="%s" """ % (args[2]) if user.startswith('group:'): - group = user.replace('group:','') + group = user.replace('group:', '') actionxml = """ """ % \ (project, package, group) if get_group(apiurl, group) == None: @@ -1577,7 +1670,7 @@ def _set_bugowner(self, args, opts): return actionxml - @cmdln.option('-a', '--action', action='callback', callback = _actionparser,dest = 'actions', + @cmdln.option('-a', '--action', action='callback', 
callback = _actionparser, dest = 'actions', help='specify action type of a request, can be : submit/delete/change_devel/add_role/set_bugowner') @cmdln.option('-m', '--message', metavar='TEXT', help='specify message TEXT') @@ -1594,8 +1687,6 @@ def _set_bugowner(self, args, opts): help='never remove source package on accept, but update its content') @cmdln.option('--no-update', action='store_true', help='never touch source package on accept (will break source links)') - @cmdln.option('-d', '--diff', action='store_true', - help='show diff only instead of creating the actual request') @cmdln.option('--yes', action='store_true', help='proceed without asking.') @cmdln.alias("creq") @@ -1603,10 +1694,10 @@ def do_createrequest(self, subcmd, opts, *args): """${cmd_name}: create multiple requests with a single command usage: - osc creq [OPTIONS] [ - -a submit SOURCEPRJ SOURCEPKG DESTPRJ [DESTPKG] - -a delete PROJECT [PACKAGE] - -a change_devel PROJECT PACKAGE DEVEL_PROJECT [DEVEL_PACKAGE] + osc creq [OPTIONS] [ + -a submit SOURCEPRJ SOURCEPKG DESTPRJ [DESTPKG] + -a delete PROJECT [PACKAGE] + -a change_devel PROJECT PACKAGE DEVEL_PROJECT [DEVEL_PACKAGE] -a add_me ROLE PROJECT [PACKAGE] -a add_group GROUP ROLE PROJECT [PACKAGE] -a add_role USER ROLE PROJECT [PACKAGE] @@ -1636,14 +1727,17 @@ def do_createrequest(self, subcmd, opts, *args): args = slash_split(args) apiurl = self.get_api_url() - + i = 0 actionsxml = "" + supersede = [] for ai in opts.actions: if ai == 'submit': args = opts.actiondata[i] i = i+1 - actionsxml += self._submit_request(args, opts, options_block) + actions, to_supersede = self._submit_request(args, opts, options_block) + actionsxml += actions + supersede.extend(to_supersede) elif ai == 'delete': args = opts.actiondata[i] actionsxml += self._delete_request(args, opts) @@ -1683,7 +1777,11 @@ def do_createrequest(self, subcmd, opts, *args): f = http_POST(u, data=xml) root = ET.parse(f).getroot() - return root.get('id') + rid = root.get('id') + for srid 
in supersede: + change_request_state(apiurl, srid, 'superseded', + 'superseded by %s' % rid, rid) + return rid @cmdln.option('-m', '--message', metavar='TEXT', @@ -1699,13 +1797,14 @@ def do_requestmaintainership(self, subcmd, opts, *args): """${cmd_name}: requests to add user as maintainer or bugowner usage: - osc requestmaintainership # for current user in checked out package - osc requestmaintainership USER # for specified user in checked out package - osc requestmaintainership PROJECT # for current user if cwd is not a checked out package - osc requestmaintainership PROJECT PACKAGE # for current user - osc requestmaintainership PROJECT PACKAGE USER # request for specified user - - osc requestbugownership ... # accepts same parameters but uses bugowner role + osc requestmaintainership # for current user in checked out package + osc requestmaintainership USER # for specified user in checked out package + osc requestmaintainership PROJECT # for current user if cwd is not a checked out package + osc requestmaintainership PROJECT PACKAGE # for current user + osc requestmaintainership PROJECT PACKAGE USER # request for specified user + osc requestmaintainership PROJECT PACKAGE group:NAME # request for specified group + + osc requestbugownership ... 
# accepts same parameters but uses bugowner role ${cmd_option_list} """ @@ -1746,7 +1845,15 @@ def do_requestmaintainership(self, subcmd, opts, *args): opts.message = edit_message() r = Request() - if role == 'bugowner': + if user.startswith('group:'): + group = user.replace('group:', '') + if role == 'bugowner': + r.add_action('set_bugowner', tgt_project=project, tgt_package=package, + group_name=group) + else: + r.add_action('add_role', tgt_project=project, tgt_package=package, + group_name=group, group_role=role) + elif role == 'bugowner': r.add_action('set_bugowner', tgt_project=project, tgt_package=package, person_name=user) else: @@ -1758,10 +1865,10 @@ def do_requestmaintainership(self, subcmd, opts, *args): @cmdln.option('-m', '--message', metavar='TEXT', help='specify message TEXT') - @cmdln.option('-r', '--repository', metavar='TEXT', - help='specify message TEXT') - @cmdln.option('--accept-in-hours', metavar='TEXT', - help='specify message time when request shall get accepted automatically. Only works with write permissions in target.') + @cmdln.option('-r', '--repository', metavar='REPOSITORY', + help='specify repository') + @cmdln.option('--accept-in-hours', metavar='HOURS', + help='specify time when request shall get accepted automatically. 
Only works with write permissions in target.') @cmdln.alias("dr") @cmdln.alias("dropreq") @cmdln.alias("droprequest") @@ -1795,7 +1902,7 @@ def do_deleterequest(self, subcmd, opts, *args): elif is_package_dir(os.getcwd()): project = store_read_project(os.curdir) package = store_read_package(os.curdir) - else: + else: raise oscerr.WrongArgs('Please specify at least a project.') if opts.repository: @@ -1806,7 +1913,7 @@ def do_deleterequest(self, subcmd, opts, *args): if package is not None: footer = textwrap.TextWrapper(width = 66).fill( 'please explain why you like to delete package %s of project %s' - % (package,project)) + % (package, project)) else: footer = textwrap.TextWrapper(width = 66).fill( 'please explain why you like to delete project %s' % project) @@ -1828,7 +1935,7 @@ def do_deleterequest(self, subcmd, opts, *args): def do_changedevelrequest(self, subcmd, opts, *args): """${cmd_name}: Create request to change the devel package definition. - [See http://en.opensuse.org/openSUSE:Build_Service_Collaboration + [See http://en.opensuse.org/openSUSE:Build_Service_Collaboration for information on this topic.] See the "request" command for showing and modifing existing requests. 
@@ -1858,7 +1965,7 @@ def do_changedevelrequest(self, subcmd, opts, *args): import textwrap footer = textwrap.TextWrapper(width = 66).fill( 'please explain why you like to change the devel project of %s/%s to %s/%s' - % (project,package,devel_project,devel_package)) + % (project, package, devel_project, devel_package)) opts.message = edit_message(footer) r = Request() @@ -1914,7 +2021,7 @@ def do_changedevelrequest(self, subcmd, opts, *args): @cmdln.option('--involved-projects', action='store_true', help='show all requests for project/packages where USER is involved') @cmdln.option('--source-buildstatus', action='store_true', - help='print the buildstatus of the source package (only works with "show")') + help='print the buildstatus of the source package (only works with "show" and the interactive review)') @cmdln.alias("rq") @cmdln.alias("review") # FIXME: rewrite this mess and split request and review @@ -1951,10 +2058,13 @@ def do_request(self, subcmd, opts, *args): "checkout" will checkout the request's source package ("submit" requests only). 
+ "priorize" change the prioritity of a request to either "critical", "important", "moderate" or "low" + + The 'review' command has the following sub commands: "list" lists open requests that need to be reviewed by the - specified user or group + specified user or group "add" adds a person or group as reviewer to a request @@ -1975,6 +2085,7 @@ def do_request(self, subcmd, opts, *args): osc request setincident [-m TEXT] ID INCIDENT osc request supersede [-m TEXT] ID SUPERSEDING_ID osc request approvenew [-m TEXT] PROJECT + osc request priorize [-m TEXT] ID PRIORITY osc request checkout/co ID osc request clone [-m TEXT] ID @@ -2008,29 +2119,31 @@ def do_request(self, subcmd, opts, *args): if opts.state == '': opts.state = 'all' - if opts.state == '': + if opts.state == '' and subcmd != 'review': opts.state = 'declined,new,review' if args[0] == 'help': return self.do_help(['help', 'request']) - cmds = ['list', 'log', 'show', 'decline', 'reopen', 'clone', 'accept', 'approvenew', 'wipe', 'setincident', 'supersede', 'revoke', 'checkout', 'co'] + cmds = ['list', 'ls', 'log', 'show', 'decline', 'reopen', 'clone', 'accept', 'approvenew', 'wipe', 'setincident', 'supersede', 'revoke', 'checkout', 'co', 'priorize'] if subcmd != 'review' and args[0] not in cmds: raise oscerr.WrongArgs('Unknown request action %s. Choose one of %s.' \ - % (args[0],', '.join(cmds))) + % (args[0], ', '.join(cmds))) cmds = ['show', 'list', 'add', 'decline', 'accept', 'reopen', 'supersede'] if subcmd == 'review' and args[0] not in cmds: raise oscerr.WrongArgs('Unknown review action %s. Choose one of %s.' 
\ - % (args[0],', '.join(cmds))) + % (args[0], ', '.join(cmds))) cmd = args[0] del args[0] + if cmd == 'ls': + cmd = "list" apiurl = self.get_api_url() if cmd in ['list']: min_args, max_args = 0, 2 - elif cmd in ['supersede', 'setincident']: + elif cmd in ['supersede', 'setincident', 'priorize']: min_args, max_args = 2, 2 else: min_args, max_args = 1, 1 @@ -2041,6 +2154,8 @@ def do_request(self, subcmd, opts, *args): if cmd in ['add'] and not opts.user and not opts.group and not opts.project: raise oscerr.WrongArgs('No reviewer specified.') + source_buildstatus = conf.config['request_show_source_buildstatus'] or opts.source_buildstatus + reqid = None supersedid = None if cmd == 'list' or cmd == 'approvenew': @@ -2067,6 +2182,9 @@ def do_request(self, subcmd, opts, *args): elif cmd == 'setincident': reqid = args[0] incident = args[1] + elif cmd == 'priorize': + reqid = args[0] + priority = args[1] elif cmd in ['log', 'add', 'show', 'decline', 'reopen', 'clone', 'accept', 'wipe', 'revoke', 'checkout', 'co']: reqid = args[0] @@ -2082,6 +2200,13 @@ def do_request(self, subcmd, opts, *args): r = http_POST(url, data=opts.message) print(ET.parse(r).getroot().get('code')) + # change priority + elif cmd == 'priorize': + query = { 'cmd': 'setpriority', 'priority': priority } + url = makeurl(apiurl, ['request', reqid], query) + r = http_POST(url, data=opts.message) + print(ET.parse(r).getroot().get('code')) + # add new reviewer to existing request elif cmd in ['add'] and subcmd == 'review': query = { 'cmd': 'addreview' } @@ -2108,13 +2233,10 @@ def do_request(self, subcmd, opts, *args): results = get_request_list(apiurl, project, package, '', ['new']) else: state_list = opts.state.split(',') + if state_list == ['']: + state_list = () if opts.all: state_list = ['all'] - if subcmd == 'review': - # is there a special reason why we do not respect the passed states? 
- state_list = ['new'] - elif opts.state == 'all': - state_list = ['all'] else: for s in state_list: if not s in states and not s == 'all': @@ -2144,16 +2266,26 @@ def do_request(self, subcmd, opts, *args): # Check if project actually exists if result list is empty if not results: if project: + msg = 'No results for %(kind)s %(entity)s' + emsg = '%(kind)s %(entity)s does not exist' + d = {'entity': [project], 'kind': 'project'} + meth = show_project_meta + if package: + d['kind'] = 'package' + d['entity'].append(package) + meth = show_package_meta try: - show_project_meta(apiurl, project) - print('No results for {0}'.format(project)) + entity = d['entity'] + d['entity'] = '/'.join(entity) + meth(apiurl, *entity) + print(msg % d) except HTTPError: - print('Project {0} does not exist'.format(project)) + print(emsg % d) else: print('No results') return - results.sort(reverse=True) + # we must not sort the results here, since the api is doing it already "the right way" days = opts.days or conf.config['request_list_days'] since = '' try: @@ -2175,7 +2307,9 @@ def do_request(self, subcmd, opts, *args): if days == 0 or result.state.when > since or result.state.name == 'new': if (opts.interactive or conf.config['request_show_interactive']) and not opts.non_interactive: ignore_reviews = subcmd != 'review' - request_interactive_review(apiurl, result, group=opts.group, ignore_reviews=ignore_reviews) + request_interactive_review(apiurl, result, group=opts.group, + ignore_reviews=ignore_reviews, + source_buildstatus=source_buildstatus) else: print(result.list_view(), '\n') else: @@ -2186,7 +2320,7 @@ def do_request(self, subcmd, opts, *args): if cmd == 'approvenew': print("\n *** Approve them all ? 
[y/n] ***") if sys.stdin.read(1) == "y": - + if not opts.message: opts.message = edit_message() for result in results: @@ -2214,10 +2348,13 @@ def do_request(self, subcmd, opts, *args): return request_interactive_review(apiurl, r, 'e') elif (opts.interactive or conf.config['request_show_interactive']) and not opts.non_interactive: ignore_reviews = subcmd != 'review' - return request_interactive_review(apiurl, r, group=opts.group, ignore_reviews=ignore_reviews) + return request_interactive_review(apiurl, r, group=opts.group, + ignore_reviews=ignore_reviews, + source_buildstatus=source_buildstatus) else: print(r) - if opts.source_buildstatus: + print_comments(apiurl, 'request', reqid) + if source_buildstatus: sr_actions = r.get_actions('submit') if not sr_actions: raise oscerr.WrongOptions( '\'--source-buildstatus\' not possible ' \ @@ -2233,8 +2370,8 @@ def do_request(self, subcmd, opts, *args): except HTTPError as e: # for OBS 2.0 and before sr_actions = r.get_actions('submit') - if not sr_actions: - raise oscerr.WrongOptions('\'--diff\' not possible (request has no \'submit\' actions)') + if not r.get_actions('submit') and not r.get_actions('maintenance_incident') and not r.get_actions('maintenance_release'): + raise oscerr.WrongOptions('\'--diff\' not possible (request has no supported actions)') for action in sr_actions: diff += 'old: %s/%s\nnew: %s/%s\n' % (action.src_project, action.src_package, action.tgt_project, action.tgt_package) @@ -2275,15 +2412,14 @@ def do_request(self, subcmd, opts, *args): body = e.read() if e.code in [403]: if review.by_user: - print('No permission on review by user %s' % review.by_user) + print('No permission on review by user %s:' % review.by_user) if review.by_group: print('No permission on review by group %s' % review.by_group) if review.by_package: print('No permission on review by package %s / %s' % (review.by_project, review.by_package)) elif review.by_project: print('No permission on review by project %s' % 
review.by_project) - else: - print(e, file=sys.stderr) + print(e, file=sys.stderr) else: print('Request is closed, please reopen the request first before changing any reviews.') # Change state of entire request @@ -2300,7 +2436,7 @@ def do_request(self, subcmd, opts, *args): if repl.lower() != 'y': print('Aborted...', file=sys.stderr) raise oscerr.UserAbort() - + if not opts.message: tmpl = change_request_state_template(rq, state_map[cmd]) opts.message = edit_message(template=tmpl) @@ -2310,7 +2446,7 @@ def do_request(self, subcmd, opts, *args): print('Result of change request state: %s' % r) except HTTPError as e: print(e, file=sys.stderr) - details = e.headers.get('X-Opensuse-Errorcode') + details = e.hdrs.get('X-Opensuse-Errorcode') if details: print(details, file=sys.stderr) root = ET.fromstring(e.read()) @@ -2322,6 +2458,8 @@ def do_request(self, subcmd, opts, *args): print('Revoking it ...') r = change_request_state(apiurl, reqid, 'revoked', opts.message or '', supersed=supersedid, force=opts.force) + sys.exit(1) + # check for devel instances after accepted requests if cmd in ['accept']: @@ -2386,12 +2524,10 @@ def do_editmeta(self, subcmd, opts, *args): Obsolete command to edit metadata. Use 'meta' now. See the help output of 'meta'. - """ - print('This command is obsolete. Use \'osc meta ...\'.', file=sys.stderr) - print('See \'osc help meta\'.', file=sys.stderr) - #self.do_help([None, 'meta']) + print("This command is obsolete. Use 'osc meta ...'.", file=sys.stderr) + print("See 'osc help meta'.", file=sys.stderr) return 2 @@ -2753,7 +2889,7 @@ def do_copypac(self, subcmd, opts, *args): @cmdln.option('--set-release', metavar='RELEASETAG', help='rename binaries during release using this release tag') def do_release(self, subcmd, opts, *args): - """${cmd_name}: Release sources and binaries + """${cmd_name}: Release sources and binaries This command is used to transfer sources and binaries without rebuilding them. 
It requires defined release targets set to trigger="manual". Please refer the @@ -2764,7 +2900,7 @@ def do_release(self, subcmd, opts, *args): ${cmd_option_list} """ - + args = slash_split(args) apiurl = self.get_api_url() @@ -2774,10 +2910,11 @@ def do_release(self, subcmd, opts, *args): raise oscerr.WrongArgs('Too many arguments.') if len(args) == 0: - if is_project_dir(os.curdir): + if is_package_dir(os.curdir): + source_project = store_read_project(os.curdir) + source_package = store_read_package(os.curdir) + elif is_project_dir(os.curdir): source_project = store_read_project(os.curdir) - elif is_package_dir(os.curdir): - source_package = store_read_package(wd) else: raise oscerr.WrongArgs('Too few arguments.') if len(args) > 0: @@ -2787,9 +2924,11 @@ def do_release(self, subcmd, opts, *args): query = { 'cmd': 'release' } if opts.target_project: - query["targetproject"] = opts.target_project + query["target_project"] = opts.target_project if opts.target_repository: - query["targetrepository"] = opts.target_repository + query["target_repository"] = opts.target_repository + if opts.repo: + query["repository"] = opts.repo if opts.set_release: query["setrelease"] = opts.set_release baseurl = ['source', source_project] @@ -2870,7 +3009,7 @@ def do_copyprj(self, subcmd, opts, *args): def do_releaserequest(self, subcmd, opts, *args): """${cmd_name}: Create a request for releasing a maintenance update. - [See http://doc.opensuse.org/products/draft/OBS/obs-reference-guide_draft/cha.obs.maintenance_setup.html + [See http://openbuildservice.org/help/manuals/obs-reference-guide/cha.obs.maintenance_setup.html for information on this topic.] This command is used by the maintence team to start the release process of a maintenance update. 
@@ -2881,7 +3020,7 @@ def do_releaserequest(self, subcmd, opts, *args): ${cmd_option_list} """ - + # FIXME: additional parameters can be a certain repo list to create a partitial release args = slash_split(args) @@ -2916,7 +3055,7 @@ def do_releaserequest(self, subcmd, opts, *args): def do_createincident(self, subcmd, opts, *args): """${cmd_name}: Create a maintenance incident - [See http://doc.opensuse.org/products/draft/OBS/obs-reference-guide_draft/cha.obs.maintenance_setup.html + [See http://openbuildservice.org/help/manuals/obs-reference-guide/cha.obs.maintenance_setup.html for information on this topic.] This command is asking to open an empty maintence incident. This can usually only be done by a responsible @@ -2972,6 +3111,8 @@ def do_createincident(self, subcmd, opts, *args): help='Use this attribute to find default maintenance project (default is OBS:MaintenanceProject)') @cmdln.option('-m', '--message', metavar='TEXT', help='specify message TEXT') + @cmdln.option('--release-project', metavar='RELEASEPROJECT', + help='Specify the release project') @cmdln.option('--no-cleanup', action='store_true', help='do not remove source project on accept') @cmdln.option('--cleanup', action='store_true', @@ -2980,11 +3121,13 @@ def do_createincident(self, subcmd, opts, *args): help='specify incident number to merge in') @cmdln.option('--incident-project', metavar='INCIDENT_PROJECT', help='specify incident project to merge in') + @cmdln.option('-s', '--supersede', metavar='SUPERSEDE', + help='Superseding another request by this one') @cmdln.alias("mr") def do_maintenancerequest(self, subcmd, opts, *args): """${cmd_name}: Create a request for starting a maintenance incident. - [See http://doc.opensuse.org/products/draft/OBS/obs-reference-guide_draft/cha.obs.maintenance_setup.html + [See http://openbuildservice.org/help/manuals/obs-reference-guide/cha.obs.maintenance_setup.html for information on this topic.] 
This command is asking the maintence team to start a maintence incident based on a @@ -2993,8 +3136,17 @@ def do_maintenancerequest(self, subcmd, opts, *args): usage: osc maintenancerequest [ SOURCEPROJECT [ SOURCEPACKAGES RELEASEPROJECT ] ] + osc maintenancerequest . + + The 2nd line when issued within a package directory provides a short cut to submit a single + package (the one in the current directory) from the project of this package to be submitted + to the release project this package links to. This syntax is only valid when specified from + a package subdirectory. ${cmd_option_list} """ + #FIXME: the follow syntax would make more sense and would obsolete the --release-project parameter + # but is incompatible with the current one + # osc maintenancerequest [ SOURCEPROJECT [ RELEASEPROJECT [ SOURCEPACKAGES ] ] args = slash_split(args) apiurl = self.get_api_url() @@ -3002,14 +3154,24 @@ def do_maintenancerequest(self, subcmd, opts, *args): if opts.attribute: maintenance_attribute = opts.attribute - source_project = source_packages = target_project = release_project = opt_sourceupdate = None + source_project = target_project = release_project = opt_sourceupdate = None + source_packages = [] if len(args) == 0 and (is_project_dir(os.curdir) or is_package_dir(os.curdir)): source_project = store_read_project(os.curdir) elif len(args) == 0: raise oscerr.WrongArgs('Too few arguments.') if len(args) > 0: - source_project = args[0] + if len(args) == 1 and args[0] == '.': + if is_package_dir(os.curdir): + source_project = store_read_project(os.curdir) + source_packages = [store_read_package(os.curdir)] + p = Package(os.curdir) + release_project = p.linkinfo.project + else: + raise oscerr.WrongArgs('No package directory') + else: + source_project = args[0] if len(args) > 1: if len(args) == 2: sys.exit('Source package defined, but no release project.') @@ -3023,6 +3185,9 @@ def do_maintenancerequest(self, subcmd, opts, *args): if 
source_project.startswith(default_branch): opt_sourceupdate = 'cleanup' + if opts.release_project: + release_project = opts.release_project + if opts.incident_project: target_project = opts.incident_project else: @@ -3040,9 +3205,27 @@ def do_maintenancerequest(self, subcmd, opts, *args): if not opts.message: opts.message = edit_message() + supersede_existing = False + reqs = [] + if not opts.supersede: + (supersede_existing, reqs) = check_existing_maintenance_requests(apiurl, + source_project, + source_packages, + target_project, + None) # unspecified release project + r = create_maintenance_request(apiurl, source_project, source_packages, target_project, release_project, opt_sourceupdate, opts.message) print(r.reqid) + if supersede_existing: + for req in reqs: + change_request_state(apiurl, req.reqid, 'superseded', + 'superseded by %s' % r.reqid, r.reqid) + + if opts.supersede: + change_request_state(apiurl, opts.supersede, 'superseded', + opts.message or '', r.reqid) + @cmdln.option('-c', '--checkout', action='store_true', help='Checkout branched package afterwards ' \ @@ -3061,7 +3244,7 @@ def do_maintenancerequest(self, subcmd, opts, *args): @cmdln.alias('sm') @cmdln.alias('maintained') def do_mbranch(self, subcmd, opts, *args): - """${cmd_name}: Search or banch multiple instances of a package + """${cmd_name}: Search or branch multiple instances of a package This command is used for searching all relevant instances of packages and creating links of them in one project. 
@@ -3110,9 +3293,12 @@ def do_mbranch(self, subcmd, opts, *args): if opts.dryrun: for r in result.findall('package'): - print("%s/%s"%(r.get('project'), r.get('package'))) + line="%s/%s"%(r.get('project'), r.get('package')) + for d in r.findall('devel'): + line+=" using sources from %s/%s"%(d.get('project'), d.get('package')) + print(line) return - + apiopt = '' if conf.get_configParser().get('general', 'apiurl') != apiurl: apiopt = '-A %s ' % apiurl @@ -3160,6 +3346,12 @@ def do_mbranch(self, subcmd, opts, *args): help='create a branch pointing to a not yet existing package') @cmdln.option('-r', '--revision', metavar='rev', help='branch against a specific revision') + @cmdln.option('--linkrev', metavar='linkrev', + help='specify the used revision in the link target.') + @cmdln.option('--add-repositories-block', metavar='add_repositories_block', + help='specify the used block strategy for new repositories') + @cmdln.option('--add-repositories-rebuild', metavar='add_repositories_rebuild', + help='specify the used rebuild strategy for new repositories') def do_branch(self, subcmd, opts, *args): """${cmd_name}: Branch a package @@ -3178,6 +3370,9 @@ def do_branch(self, subcmd, opts, *args): (list of projects from oscrc:getpac_default_project) if nothing else is specfied on the command line. + In case of branch errors, where the source has currently merge + conflicts use --linkrev=base option. + usage: osc branch osc branch SOURCEPROJECT SOURCEPACKAGE @@ -3188,7 +3383,7 @@ def do_branch(self, subcmd, opts, *args): ${cmd_option_list} """ - if subcmd == 'getpac' or subcmd == 'branchco' or subcmd == 'bco': + if subcmd == 'getpac' or subcmd == 'branchco' or subcmd == 'bco': opts.checkout = True args = slash_split(args) tproject = tpackage = None @@ -3198,7 +3393,7 @@ def do_branch(self, subcmd, opts, *args): print('defaulting to %s/%s' % (def_p, args[0]), file=sys.stderr) # python has no args.unshift ??? 
args = [ def_p, args[0] ] - + if len(args) == 0 and is_package_dir('.'): args = (store_read_project('.'), store_read_package('.')) @@ -3213,16 +3408,38 @@ def do_branch(self, subcmd, opts, *args): if len(args) >= 4: tpackage = args[3] - exists, targetprj, targetpkg, srcprj, srcpkg = \ + try: + exists, targetprj, targetpkg, srcprj, srcpkg = \ branch_pkg(apiurl, args[0], args[1], nodevelproject=opts.nodevelproject, rev=opts.revision, + linkrev=opts.linkrev, target_project=tproject, target_package=tpackage, return_existing=opts.checkout, msg=opts.message or '', force=opts.force, noaccess=opts.noaccess, add_repositories=opts.add_repositories, + add_repositories_block=opts.add_repositories_block, + add_repositories_rebuild=opts.add_repositories_rebuild, extend_package_names=opts.extend_package_names, missingok=opts.new_package, maintenance=opts.maintenance) + except oscerr.NotMissing as e: + print('NOTE: Package target exists already via project links, link will point to given project.') + print(' A submission will initialize a new instance.') + exists, targetprj, targetpkg, srcprj, srcpkg = \ + branch_pkg(apiurl, args[0], args[1], + nodevelproject=opts.nodevelproject, rev=opts.revision, + linkrev=opts.linkrev, + target_project=tproject, target_package=tpackage, + return_existing=opts.checkout, msg=opts.message or '', + force=opts.force, noaccess=opts.noaccess, + add_repositories=opts.add_repositories, + add_repositories_block=opts.add_repositories_block, + add_repositories_rebuild=opts.add_repositories_rebuild, + extend_package_names=opts.extend_package_names, + missingok=False, + maintenance=opts.maintenance, + newinstance=opts.new_package) + if exists: print('Using existing branch project: %s' % targetprj, file=sys.stderr) @@ -3247,7 +3464,7 @@ def do_branch(self, subcmd, opts, *args): package = targetpkg or args[1] if opts.checkout: - checkout_package(apiurl, targetprj, package, server_service_files=True, + checkout_package(apiurl, targetprj, package, 
server_service_files=False, expand_link=True, prj_dir=targetprj) if conf.config['verbose']: print('Note: You can use "osc delete" or "osc submitpac" when done.\n') @@ -3347,7 +3564,7 @@ def do_rdelete(self, subcmd, opts, *args): ## FIXME: core.py:commitDelPackage() should have something similar rlist = get_request_list(apiurl, prj, pkg) - for rq in rlist: + for rq in rlist: print(rq) if len(rlist) >= 1 and not opts.force: print('Package has pending requests. Deleting the package will break them. '\ @@ -3366,6 +3583,35 @@ def do_rdelete(self, subcmd, opts, *args): delete_project(apiurl, prj, opts.force, msg) + @cmdln.option('-m', '--message', metavar='TEXT', + help='specify log message TEXT') + def do_lock(self, subcmd, opts, project, package=None): + """${cmd_name}: Locks a project or package. + + usage: + osc lock PROJECT [PACKAGE] + + ${cmd_option_list} + """ + apiurl = self.get_api_url() + kind = 'prj' + path_args = (project,) + if package is not None: + kind = 'pkg' + path_args = (project, package) + meta = meta_exists(kind, path_args, create_new=False, apiurl=apiurl) + root = ET.fromstring(''.join(meta)) + if root.find('lock') is not None: + print('Already locked', file=sys.stderr) + sys.exit(1) + # alternatively, we could also use the set_flag api call + # instead of manually manipulating the xml + lock = ET.SubElement(root, 'lock') + ET.SubElement(lock, 'enable') + meta = ET.tostring(root) + edit_meta(kind, path_args=path_args, data=meta, msg=opts.message) + + @cmdln.option('-m', '--message', metavar='TEXT', help='specify log message TEXT') def do_unlock(self, subcmd, opts, *args): @@ -3410,27 +3656,39 @@ def do_unlock(self, subcmd, opts, *args): @cmdln.hide(1) def do_deletepac(self, subcmd, opts, *args): - print("""${cmd_name} is obsolete ! + """${cmd_name}: + + Obsolete command to delete package. Use 'delete' or 'rdelete' now. + + See the help output of 'delete' and 'rdelete'. + """ + + print("""Command deletepac is obsolete ! 
Please use either osc delete for checked out packages or projects or - osc rdelete for server side operations.""") + osc rdelete for server side operations.""", file=sys.stderr) - sys.exit(1) + return 2 @cmdln.hide(1) @cmdln.option('-f', '--force', action='store_true', help='deletes a project and its packages') def do_deleteprj(self, subcmd, opts, project): - """${cmd_name} is obsolete ! + """${cmd_name}: + + Obsolete command to delete project. Use 'rdelete' now. - Please use - osc rdelete PROJECT + See the help output of 'rdelete'. """ - sys.exit(1) + + print("This command is obsolete. Use 'osc rdelete '.", file=sys.stderr) + print("See 'osc help rdelete'.", file=sys.stderr) + return 2 @cmdln.alias('metafromspec') + @cmdln.alias('updatepkgmetafromspec') @cmdln.option('', '--specfile', metavar='FILE', help='Path to specfile. (if you pass more than working copy this option is ignored)') def do_updatepacmetafromspec(self, subcmd, opts, *args): @@ -3481,11 +3739,11 @@ def do_diff(self, subcmd, opts, *args): Default: all files. osc diff --link - osc linkdiff + osc linkdiff Compare current checkout directory against the link base. - osc diff --link PROJ PACK - osc linkdiff PROJ PACK + osc diff --link PROJ PACK + osc linkdiff PROJ PACK Compare a package against the link base (ignoring working copy changes). ${cmd_option_list} @@ -3494,7 +3752,7 @@ def do_diff(self, subcmd, opts, *args): if (subcmd == 'ldiff' or subcmd == 'linkdiff'): opts.link = True args = parseargs(args) - + pacs = None if not opts.link or not len(args) == 2: pacs = findpacs(args) @@ -3786,7 +4044,7 @@ def do_pdiff(self, subcmd, opts, *args): return 1 if not noparentok and not self._pdiff_package_exists(apiurl, parent_project, parent_package): - self._pdiff_raise_non_existing_package(parent_project, parent_package, + self._pdiff_raise_non_existing_package(parent_project, parent_package, msg = 'Parent for %s/%s (%s/%s) does not exist.' 
% \ (project, package, parent_project, parent_package)) @@ -3946,36 +4204,6 @@ def do_prdiff(self, subcmd, opts, *args): if opts.show_not_in_old: print("new only: %s" % pkg) - @cmdln.hide(1) - @cmdln.alias('in') - def do_install(self, subcmd, opts, *args): - """${cmd_name}: install a package after build via zypper in -r - - Not implemented here. Please try - http://software.opensuse.org/search?q=osc-plugin-install&include_home=true - - - ${cmd_usage} - ${cmd_option_list} - """ - - args = slash_split(args) - args = expand_proj_pack(args) - - ## FIXME: - ## if there is only one argument, and it ends in .ymp - ## then fetch it, Parse XML to get the first - ## metapackage.group.repositories.repository.url - ## and construct zypper cmd's for all - ## metapackage.group.software.item.name - ## - ## if args[0] is already an url, the use it as is. - - cmd = "sudo zypper -p http://download.opensuse.org/repositories/%s/%s --no-refresh -v in %s" % \ - (re.sub(':', ':/', args[0]), 'openSUSE_11.4', args[1]) - print(self.do_install.__doc__) - print("Example: \n" + cmd) - def do_repourls(self, subcmd, opts, *args): """${cmd_name}: Shows URLs of .repo files @@ -3998,8 +4226,12 @@ def do_repourls(self, subcmd, opts, *args): else: raise oscerr.WrongArgs('Wrong number of arguments') - # XXX: API should somehow tell that - url_tmpl = 'http://download.opensuse.org/repositories/%s/%s/%s.repo' + root = ET.fromstring(''.join(show_configuration(apiurl))) + elm = root.find('download_url') + if elm is None or not elm.text: + raise oscerr.APIError('download_url configuration element expected') + + url_tmpl = elm.text + '/%s/%s/%s.repo' repos = get_repositories_of_project(apiurl, project) for repo in repos: print(url_tmpl % (project.replace(':', ':/'), repo, project)) @@ -4018,10 +4250,10 @@ def do_repourls(self, subcmd, opts, *args): @cmdln.option('-M', '--meta', action='store_true', help='checkout out meta data instead of sources' ) @cmdln.option('-c', '--current-dir', action='store_true', - 
help='place PACKAGE folder in the current directory' \ + help='place PACKAGE folder in the current directory ' \ 'instead of a PROJECT/PACKAGE directory') @cmdln.option('-o', '--output-dir', metavar='outdir', - help='place package in the specified directory' \ + help='place package in the specified directory ' \ 'instead of a PROJECT/PACKAGE directory') @cmdln.option('-s', '--source-service-files', action='store_true', help='Run source services.' ) @@ -4051,9 +4283,9 @@ def do_checkout(self, subcmd, opts, *args): while inside a project directory: osc co PACKAGE # check out PACKAGE from project - + with the result of rpm -q --qf '%%{DISTURL}\\n' PACKAGE - osc co obs://API/PROJECT/PLATFORM/REVISION-PACKAGE + osc co obs://API/PROJECT/PLATFORM/REVISION-PACKAGE ${cmd_option_list} """ @@ -4068,7 +4300,7 @@ def do_checkout(self, subcmd, opts, *args): + self.get_cmd_help('checkout')) # XXX: this too openSUSE-setup specific... - # FIXME: this should go into ~jw/patches/osc/osc.proj_pack_20101201.diff + # FIXME: this should go into ~jw/patches/osc/osc.proj_pack_20101201.diff # to be available to all subcommands via @cmdline.prep(proj_pack) # obs://build.opensuse.org/openSUSE:11.3/standard/fc6c25e795a89503e99d59da5dc94a79-screen m = re.match(r"obs://([^/]+)/(\S+)/([^/]+)/([A-Fa-f\d]+)\-(\S+)", args[0]) @@ -4121,11 +4353,11 @@ def do_checkout(self, subcmd, opts, *args): print_request_list(apiurl, project, package) elif project: - prj_dir = project + prj_dir = opts.output_dir if opts.output_dir else project if sys.platform[:3] == 'win': prj_dir = prj_dir.replace(':', ';') if os.path.exists(prj_dir): - sys.exit('osc: project \'%s\' already exists' % project) + sys.exit('osc: project directory \'%s\' already exists' % prj_dir) # check if the project does exist (show_project_meta will throw an exception) show_project_meta(apiurl, project) @@ -4135,6 +4367,13 @@ def do_checkout(self, subcmd, opts, *args): # all packages for package in meta_get_packagelist(apiurl, project): + if 
opts.output_dir is not None: + outputdir = os.path.join(opts.output_dir, package) + if not os.path.exists(opts.output_dir): + os.mkdir(os.path.join(opts.output_dir)) + else: + outputdir=None + # don't check out local links by default try: m = show_files_meta(apiurl, project, package) @@ -4192,6 +4431,8 @@ def do_status(self, subcmd, opts, *args): 'M' Modified '?' item is not under version control '!' item is missing (removed by non-osc command) or incomplete + 'S' item is skipped (item exceeds a file size limit or is _service:* file) + 'F' Frozen (use "osc pull" to merge conflicts) (package-only state) examples: osc st @@ -4225,7 +4466,9 @@ def do_status(self, subcmd, opts, *args): # state is != ' ' lines.append(statfrmt(st, os.path.normpath(os.path.join(prj.dir, pac)))) continue - if st == ' ' and opts.verbose or st != ' ': + if p.isfrozen(): + lines.append(statfrmt('F', os.path.normpath(os.path.join(prj.dir, pac)))) + elif st == ' ' and opts.verbose or st != ' ': lines.append(statfrmt(st, os.path.normpath(os.path.join(prj.dir, pac)))) states = p.get_status(opts.show_excluded, *excl_states) for st, filename in sorted(states, lambda x, y: cmp(x[1], y[1])): @@ -4234,10 +4477,6 @@ def do_status(self, subcmd, opts, *args): p = findpacs([arg])[0] for st, filename in sorted(p.get_status(opts.show_excluded, *excl_states), lambda x, y: cmp(x[1], y[1])): lines.append(statfrmt(st, os.path.normpath(os.path.join(p.dir, filename)))) - # arrange the lines in order: unknown files first - # filenames are already sorted - lines = [l for l in lines if l[0] == '?'] + \ - [l for l in lines if l[0] != '?'] if lines: print('\n'.join(lines)) @@ -4330,9 +4569,10 @@ def do_addremove(self, subcmd, opts, *args): pacs = findpacs(args) for p in pacs: - p.todo = list(set(p.filenamelist + p.filenamelist_unvers + p.to_be_added)) - for filename in p.todo: - if os.path.isdir(filename): + todo = list(set(p.filenamelist + p.filenamelist_unvers + p.to_be_added)) + for filename in todo: + abs_filename 
= os.path.join(p.absdir, filename) + if os.path.isdir(abs_filename): continue # ignore foo.rXX, foo.mine for files which are in 'C' state if os.path.splitext(filename)[0] in p.in_conflict: @@ -4341,6 +4581,9 @@ def do_addremove(self, subcmd, opts, *args): if state == '?': # TODO: should ignore typical backup files suffix ~ or .orig p.addfile(filename) + elif state == 'D' and os.path.isfile(abs_filename): + # if the "deleted" file exists in the wc, track it again + p.addfile(filename) elif state == '!': p.delete_file(filename) print(statfrmt('D', getTransActPath(os.path.join(p.dir, filename)))) @@ -4354,7 +4597,7 @@ def do_addremove(self, subcmd, opts, *args): @cmdln.option('-F', '--file', metavar='FILE', help='read log message from FILE, \'-\' denotes standard input.') @cmdln.option('-f', '--force', default=False, action="store_true", - help='ignored') + help='force commit, even if there were no changes') @cmdln.option('--skip-validation', default=False, action="store_true", help='deprecated, don\'t use it') @cmdln.option('-v', '--verbose', default=False, action="store_true", @@ -4375,8 +4618,19 @@ def do_commit(self, subcmd, opts, *args): ${cmd_usage} ${cmd_option_list} """ - args = parseargs(args) + try: + self._commit(subcmd, opts, args) + except oscerr.ExtRuntimeError as e: + print("ERROR: service run failed", e, file=sys.stderr) + return 1 + except oscerr.PackageNotInstalled as e: + print("ERROR: please install %s " % e.args, end='') + print("or use the --noservice option") + return 1 + + def _commit(self, subcmd, opts, args): + args = parseargs(args) if opts.skip_validation: print("WARNING: deprecated option --skip-validation ignored.", file=sys.stderr) @@ -4397,25 +4651,21 @@ def do_commit(self, subcmd, opts, *args): arg_list = args[:] for arg in arg_list: if conf.config['do_package_tracking'] and is_project_dir(arg): - try: - prj = Project(arg) - if not msg and not opts.no_message: - msg = edit_message() - - # check any of the packages is a link, if so, as 
for branching - pacs = (Package(os.path.join(prj.dir, pac)) - for pac in prj.pacs_have if prj.get_state(pac) == ' ') - can_branch = False - if any(pac.is_link_to_different_project() for pac in pacs): - repl = raw_input('Some of the packages are links to a different project!\n' \ - 'Create a local branch before commit? (y|N) ') - if repl in('y', 'Y'): - can_branch = True - - prj.commit(msg=msg, skip_local_service_run=skip_local_service_run, verbose=opts.verbose, can_branch=can_branch) - except oscerr.ExtRuntimeError as e: - print("ERROR: service run failed", e, file=sys.stderr) - return 1 + prj = Project(arg) + if not msg and not opts.no_message: + msg = edit_message() + + # check any of the packages is a link, if so, as for branching + pacs = (Package(os.path.join(prj.dir, pac)) + for pac in prj.pacs_have if prj.get_state(pac) == ' ') + can_branch = False + if any(pac.is_link_to_different_project() for pac in pacs): + repl = raw_input('Some of the packages are links to a different project!\n' \ + 'Create a local branch before commit? 
(y|N) ') + if repl in('y', 'Y'): + can_branch = True + + prj.commit(msg=msg, skip_local_service_run=skip_local_service_run, verbose=opts.verbose, can_branch=can_branch) args.remove(arg) pacs = findpacs(args) @@ -4453,23 +4703,22 @@ def do_commit(self, subcmd, opts, *args): if repl in('y', 'Y'): can_branch = True - prj.commit(packages, msg=msg, files=files, skip_local_service_run=skip_local_service_run, verbose=opts.verbose, can_branch=can_branch) + prj.commit(packages, msg=msg, files=files, skip_local_service_run=skip_local_service_run, verbose=opts.verbose, can_branch=can_branch, force=opts.force) store_unlink_file(prj.absdir, '_commit_msg') for pac in single_paths: p = Package(pac) if not msg and not opts.no_message: msg = get_commit_msg(p.absdir, [p]) - p.commit(msg, skip_local_service_run=skip_local_service_run, verbose=opts.verbose) + p.commit(msg, skip_local_service_run=skip_local_service_run, verbose=opts.verbose, force=opts.force) store_unlink_file(p.absdir, '_commit_msg') else: for p in pacs: - p = Package(pac) if not p.todo: p.todo = p.filenamelist + p.filenamelist_unvers p.todo.sort() if not msg and not opts.no_message: msg = get_commit_msg(p.absdir, [p]) - p.commit(msg, skip_local_service_run=skip_local_service_run, verbose=opts.verbose) + p.commit(msg, skip_local_service_run=skip_local_service_run, verbose=opts.verbose, force=opts.force) store_unlink_file(p.absdir, '_commit_msg') @cmdln.option('-r', '--revision', metavar='REV', @@ -4536,7 +4785,7 @@ def do_update(self, subcmd, opts, *args): # (a) update all packages args += prj.pacs_have # (b) fetch new packages - prj.checkout_missing_pacs(expand_link = not opts.unexpand_link) + prj.checkout_missing_pacs(opts.expand_link, opts.unexpand_link) args.remove(arg) print_request_list(prj.apiurl, prj.name) @@ -4566,9 +4815,10 @@ def do_update(self, subcmd, opts, *args): # sys.exit(1) if not rev: - if opts.expand_link and p.islink() and not p.isexpanded(): + if opts.expand_link: rev = p.latest_rev(expand=True) 
- print('Expanding to rev', rev) + if p.islink() and not p.isexpanded(): + print('Expanding to rev', rev) elif opts.unexpand_link and p.islink() and p.isexpanded(): rev = show_upstream_rev(p.apiurl, p.prjname, p.name, meta=p.meta) print('Unexpanding to rev', rev) @@ -4700,10 +4950,10 @@ def do_distributions(self, subcmd, opts, *args): """${cmd_name}: Shows all available distributions This command shows the available distributions. For active distributions - it shows the name, project and name of the repository and a suggested default repository name. + it shows the name, project and name of the repository and a suggested default repository name. usage: - osc distributions + osc distributions ${cmd_option_list} """ @@ -4713,8 +4963,17 @@ def do_distributions(self, subcmd, opts, *args): @cmdln.hide(1) def do_results_meta(self, subcmd, opts, *args): - print("Command results_meta is obsolete. Please use: osc results --xml") - sys.exit(1) + """${cmd_name}: + + Obsolete command to show build results. Use 'results --xml' now. + + See the help output of 'results'. + """ + + print("This command is obsolete. Use 'osc results --xml'.", + file=sys.stderr) + print("See 'osc help results'.", file=sys.stderr) + return 2 @cmdln.hide(1) @cmdln.option('-l', '--last-build', action='store_true', @@ -4726,10 +4985,17 @@ def do_results_meta(self, subcmd, opts, *args): @cmdln.option('', '--xml', action='store_true', help='generate output in XML (former results_meta)') def do_rresults(self, subcmd, opts, *args): - print("Command rresults is obsolete. Running 'osc results' instead") - self.do_results('results', opts, *args) - sys.exit(1) + """${cmd_name}: + + Obsolete command to show build results. Use 'results' now. + See the help output of 'results'. + """ + + print("Command rresults is obsolete. 
Running 'osc results' instead", + file=sys.stderr) + print("See 'osc help results'.", file=sys.stderr) + return self.do_results('results', opts, *args) @cmdln.option('-f', '--force', action='store_true', default=False, help="Don't ask and delete files") @@ -4776,6 +5042,10 @@ def do_rremove(self, subcmd, opts, project, package, *files): help='Show results only for specified architecture(s)') @cmdln.option('-v', '--verbose', action='store_true', default=False, help='more verbose output') + @cmdln.option('--no-multibuild', action='store_true', default=False, + help='Disable results for all direct affect packages inside of the project') + @cmdln.option('-M', '--multibuild-package', action='append', default=[], + help='Only show results for the specified multibuild package') @cmdln.option('-w', '--watch', action='store_true', default=False, help='watch the results until all finished building') @cmdln.option('', '--xml', action='store_true', default=False, @@ -4831,18 +5101,28 @@ def do_results(self, subcmd, opts, *args): if opts.xml and opts.csv: raise oscerr.WrongOptions("--xml and --csv are mutual exclusive") - args = [ apiurl, project, package, opts.last_build, opts.repo, opts.arch ] - if opts.xml: - print(''.join(show_results_meta(*args)), end=' ') - elif opts.csv: - # ignore _oldstate key - results = [r for r in get_package_results(*args) if not '_oldstate' in r] - print('\n'.join(format_results(results, opts.format))) + kwargs = {'apiurl': apiurl, 'project': project, 'package': package, + 'lastbuild': opts.last_build, 'repository': opts.repo, + 'arch': opts.arch, 'wait': opts.watch} + if opts.multibuild_package: + opts.no_multibuild = False + kwargs['multibuild_packages'] = opts.multibuild_package + if not opts.no_multibuild: + kwargs['multibuild'] = kwargs['locallink'] = True + if opts.xml or opts.csv: + for xml in get_package_results(**kwargs): + if opts.xml: + print(xml, end='') + else: + # csv formatting + results = [r for r, _ in result_xml_to_dicts(xml)] + 
print('\n'.join(format_results(results, opts.format))) else: - args.append(opts.verbose) - args.append(opts.watch) - args.append("\n") - get_results(*args) + kwargs['verbose'] = opts.verbose + kwargs['wait'] = opts.watch + kwargs['printJoin'] = '\n' + get_results(**kwargs) + # WARNING: this function is also called by do_results. You need to set a default there # as well when adding a new option! @@ -4906,14 +5186,25 @@ def do_prjresults(self, subcmd, opts, *args): @cmdln.hide(1) def do_rprjresults(self, subcmd, opts, *args): - print("Command rprjresults is obsolete. Please use 'osc prjresults'") - sys.exit(1) + """${cmd_name}: + + Obsolete command to show project-wide build results. Use 'prjresults' now. + + See the help output of 'prjresults'. + """ + + print("Command rprjresults is obsolete. Please use 'osc prjresults'", + file=sys.stderr) + print("See 'osc help prjresults'.", file=sys.stderr) + return 2 @cmdln.alias('bl') @cmdln.alias('blt') @cmdln.alias('buildlogtail') @cmdln.option('-l', '--last', action='store_true', help='Show the last finished log file') + @cmdln.option('-M', '--multibuild-package', metavar='MPAC', + help='get log of the specified multibuild package') @cmdln.option('-o', '--offset', metavar='OFFSET', help='get log start or end from the offset') @cmdln.option('-s', '--strip-time', action='store_true', @@ -4935,23 +5226,34 @@ def do_buildlog(self, subcmd, opts, *args): ${cmd_usage} [REPOSITORY ARCH | BUILDLOGURL] ${cmd_option_list} """ + import osc.build - repository = arch = None + project = package = repository = arch = None apiurl = self.get_api_url() if len(args) == 1 and args[0].startswith('http'): apiurl, project, package, repository, arch = parse_buildlogurl(args[0]) - elif len(args) < 2: - self.print_repos() - elif len(args) > 2: - raise oscerr.WrongArgs('Too many arguments.') else: - wd = os.curdir - package = store_read_package(wd) - project = store_read_project(wd) - repository = args[0] - arch = args[1] + project = 
store_read_project(os.curdir) + package = store_read_package(os.curdir) + if len(args) == 1: + repository, arch = self._find_last_repo_arch(args[0], fatal=False) + if repository is None: + # no local build with this repo was done + print('failed to guess arch, using hostarch') + repository = args[0] + arch = osc.build.hostarch + elif len(args) < 2: + self.print_repos() + elif len(args) > 2: + raise oscerr.WrongArgs('Too many arguments.') + else: + repository = args[0] + arch = args[1] + + if opts.multibuild_package: + package = package + ":" + opts.multibuild_package offset = 0 if subcmd == "blt" or subcmd == "buildlogtail": @@ -5000,6 +5302,8 @@ def print_repos(self, repos_only=False, exc_class=oscerr.WrongArgs, exc_msg='Mis @cmdln.alias('remotebuildlogtail') @cmdln.option('-l', '--last', action='store_true', help='Show the last finished log file') + @cmdln.option('-M', '--multibuild-package', metavar='MPAC', + help='show log file for specified multibuild package') @cmdln.option('-o', '--offset', metavar='OFFSET', help='get log starting or ending from the offset') @cmdln.option('-s', '--strip-time', action='store_true', @@ -5032,6 +5336,9 @@ def do_remotebuildlog(self, subcmd, opts, *args): else: project, package, repository, arch = args + if opts.multibuild_package: + package = package + ":" + opts.multibuild_package + offset = 0 if subcmd == "rblt" or subcmd == "rbuildlogtail" or subcmd == "remotebuildlogtail": query = { 'view': 'entry' } @@ -5052,6 +5359,26 @@ def do_remotebuildlog(self, subcmd, opts, *args): strip_time = opts.strip_time or conf.config['buildlog_strip_time'] print_buildlog(apiurl, project, package, repository, arch, offset, strip_time, opts.last) + def _find_last_repo_arch(self, repo=None, fatal=True): + import glob + files = glob.glob(os.path.join(os.getcwd(), store, "_buildinfo-*")) + if repo is not None: + files = [f for f in files + if os.path.basename(f).replace('_buildinfo-', '').startswith(repo + '-')] + if not files: + if not fatal: + 
return None, None + self.print_repos() + cfg = files[0] + # find newest file + for f in files[1:]: + if os.stat(f).st_atime > os.stat(cfg).st_atime: + cfg = f + root = ET.parse(cfg).getroot() + repo = root.get("repository") + arch = root.find("arch").text + return repo, arch + @cmdln.alias('lbl') @cmdln.option('-o', '--offset', metavar='OFFSET', help='get log starting from offset') @@ -5072,23 +5399,12 @@ def do_localbuildlog(self, subcmd, opts, *args): sys.exit(1) if len(args) == 0 or len(args) == 1: + project = store_read_project('.') package = store_read_package('.') - import glob - files = glob.glob(os.path.join(os.getcwd(), store, "_buildinfo-*")) + repo = None if args: - files = [f for f in files - if os.path.basename(f).replace('_buildinfo-', '').startswith(args[0] + '-')] - if not files: - self.print_repos() - cfg = files[0] - # find newest file - for f in files[1:]: - if os.stat(f).st_mtime > os.stat(cfg).st_mtime: - cfg = f - root = ET.parse(cfg).getroot() - project = root.get("project") - repo = root.get("repository") - arch = root.find("arch").text + repo = args[0] + repo, arch = self._find_last_repo_arch(repo) elif len(args) == 2: project = store_read_project('.') package = store_read_package('.') @@ -5099,9 +5415,11 @@ def do_localbuildlog(self, subcmd, opts, *args): self.print_repos() raise oscerr.WrongArgs('Wrong number of arguments.') + # TODO: refactor/unify buildroot calculation and move it to core.py buildroot = os.environ.get('OSC_BUILD_ROOT', conf.config['build-root']) + apihost = urlsplit(self.get_api_url())[1] buildroot = buildroot % {'project': project, 'package': package, - 'repo': repo, 'arch': arch} + 'repo': repo, 'arch': arch, 'apihost': apihost} offset = 0 if opts.offset: offset = int(opts.offset) @@ -5144,7 +5462,7 @@ def do_triggerreason(self, subcmd, opts, *args): if len(args) < 2: self.print_repos() - + apiurl = self.get_api_url() if len(args) == 2: # 2 @@ -5249,6 +5567,8 @@ def do_dependson(self, subcmd, opts, *args): 
@cmdln.option('-d', '--debug', action='store_true', help='verbose output of build dependencies') + @cmdln.option('-M', '--multibuild-package', metavar='MPAC', + help='Show the buildinfo of the specified multibuild package') @cmdln.option('-x', '--extra-pkgs', metavar='PAC', action='append', help='Add this package when computing the buildinfo') @cmdln.option('-p', '--prefer-pkgs', metavar='DIR', action='append', @@ -5296,7 +5616,7 @@ def do_buildinfo(self, subcmd, opts, *args): raise oscerr.WrongArgs('Incorrect number of arguments (Note: \'.\' is no package wc)') project = store_read_project('.') package = store_read_package('.') - repository, arch, build_descr = self.parse_repoarchdescr(args, ignore_descr=True) + repository, arch, build_descr = self.parse_repoarchdescr(args, ignore_descr=True, multibuild_package=opts.multibuild_package) elif len(args) == 4 or len(args) == 5: project = args[0] package = args[1] @@ -5316,10 +5636,17 @@ def do_buildinfo(self, subcmd, opts, *args): raise oscerr.WrongArgs('error: a build description is needed if \'--prefer-pkgs\' is used') elif opts.prefer_pkgs: from .build import get_prefer_pkgs + from .util import cpio print('Scanning the following dirs for local packages: %s' % ', '.join(opts.prefer_pkgs)) - prefer_pkgs, cpio = get_prefer_pkgs(opts.prefer_pkgs, arch, os.path.splitext(args[2])[1]) - cpio.add(os.path.basename(args[2]), build_descr_data) - build_descr_data = cpio.get() + cpiodata = cpio.CpioWrite() + prefer_pkgs, cpio = get_prefer_pkgs(opts.prefer_pkgs, arch, + os.path.splitext(build_descr)[1], + cpiodata) + cpiodata.add(os.path.basename(build_descr), build_descr_data) + build_descr_data = cpiodata.get() + + if opts.multibuild_package: + package = package + ":" + opts.multibuild_package print(''.join(get_buildinfo(apiurl, project, package, repository, arch, @@ -5338,7 +5665,7 @@ def do_buildconfig(self, subcmd, opts, *args): which is directly readable by the build script. 
It contains RPM macros and BuildRequires expansions, for example. - The argument REPOSITORY an be taken from the first column of the + The argument REPOSITORY an be taken from the first column of the 'osc repos' output. usage: @@ -5371,6 +5698,74 @@ def do_buildconfig(self, subcmd, opts, *args): print(''.join(get_buildconfig(apiurl, project, repository))) + def do_workerinfo(self, subcmd, opts, worker): + """${cmd_name}: gets the information to a worker from the server + + Examples: + osc workerinfo + + ${cmd_usage} + ${cmd_option_list} + """ + apiurl = self.get_api_url() + print(''.join(get_worker_info(apiurl, worker))) + + + @cmdln.option('', '--ignore-file', action='store_true', + help='ignore _constraints file and only check project constraints') + def do_checkconstraints(self, subcmd, opts, *args): + """${cmd_name}: check the constraints and view compliant workers + + Checks the constraints for compliant workers. + + usage: + in a package working copy: + osc checkconstraints [OPTS] REPOSITORY ARCH CONSTRAINTSFILE + osc checkconstraints [OPTS] CONSTRAINTSFILE + osc checkconstraints [OPTS] + + ${cmd_option_list} + """ + repository = arch = constraintsfile = None + project = store_read_project('.') + package = store_read_package('.') + if len(args) == 1: + constraintsfile = args[0] + elif len(args) == 2 or len(args) == 3: + repository = args[0] + arch = args[1] + if len(args) == 3: + constraintsfile = args[2] + + constraintsfile_data = None + if constraintsfile is not None: + constraintsfile_data = open(constraintsfile, 'r').read() + elif not opts.ignore_file: + if os.path.isfile("_constraints"): + constraintsfile_data = open("_constraints", 'r').read() + else: + print("No local _constraints file. 
Using just the project constraints") + + apiurl = self.get_api_url() + r = [] + if not arch and not repository: + result_line_templ = '%(name)-25s %(arch)-25s %(comp_workers)s' + for repo in get_repos_of_project(apiurl, project): + rmap = {} + rmap['name'] = repo.name + rmap['arch'] = repo.arch + workers = check_constraints(apiurl, project, repo.name, repo.arch, package, constraintsfile_data) + rmap['comp_workers'] = len(workers) + r.append(result_line_templ % rmap) + r.insert(0, 'Repository Arch Worker') + r.insert(1, '---------- ---- ------') + else: + r = check_constraints(apiurl, project, repository, arch, package, constraintsfile_data) + r.insert(0, 'Worker') + r.insert(1, '------') + + print('\n'.join(r)) + @cmdln.alias('repos') @cmdln.alias('platforms') def do_repositories(self, subcmd, opts, *args): @@ -5410,7 +5805,7 @@ def do_repositories(self, subcmd, opts, *args): disabled = show_package_disabled_repos(apiurl, project, package) if subcmd == 'repos_only': - for repo in get_repositories_of_project(apiurl, project): + for repo in get_repositories_of_project(apiurl, project): if (disabled is None) or ((disabled is not None) and (repo not in disabled)): print(repo) else: @@ -5418,15 +5813,16 @@ def do_repositories(self, subcmd, opts, *args): for repo in get_repos_of_project(apiurl, project): if (disabled is None) or ((disabled is not None) and (repo.name not in disabled)): data += [repo.name, repo.arch] - + for row in build_table(2, data, width=2): print(row) - def parse_repoarchdescr(self, args, noinit = False, alternative_project = None, ignore_descr = False, vm_type = None): + def parse_repoarchdescr(self, args, noinit = False, alternative_project = None, ignore_descr = False, vm_type = None, multibuild_package = None): """helper to parse the repo, arch and build description from args""" import osc.build import glob + import tempfile arg_arch = arg_repository = arg_descr = None if len(args) < 3: # some magic, works only sometimes, but people seem to like 
it :/ @@ -5436,7 +5832,7 @@ def parse_repoarchdescr(self, args, noinit = False, alternative_project = None, for subarch in osc.build.can_also_build.get(mainarch): all_archs.append(subarch) for arg in args: - if arg.endswith('.spec') or arg.endswith('.dsc') or arg.endswith('.kiwi') or arg == 'PKGBUILD': + if arg.endswith('.spec') or arg.endswith('.dsc') or arg.endswith('.kiwi') or arg.endswith('.livebuild') or arg == 'PKGBUILD' or arg == 'build.collax': arg_descr = arg else: if (arg == osc.build.hostarch or arg in all_archs) and arg_arch is None: @@ -5447,7 +5843,10 @@ def parse_repoarchdescr(self, args, noinit = False, alternative_project = None, elif not arg_repository: arg_repository = arg else: - raise oscerr.WrongArgs('unexpected argument: \'%s\'' % arg) +# raise oscerr.WrongArgs('\'%s\' is neither a build description nor a supported arch' % arg) + # take it as arch (even though this is no supported arch) - hopefully, this invalid + # arch will be detected below + arg_arch = arg else: arg_repository, arg_arch, arg_descr = args @@ -5457,42 +5856,45 @@ def parse_repoarchdescr(self, args, noinit = False, alternative_project = None, # store list of repos for potential offline use repolistfile = os.path.join(os.getcwd(), osc.core.store, "_build_repositories") if noinit: - if os.path.exists(repolistfile): - f = open(repolistfile, 'r') - repositories = [ l.strip()for l in f.readlines()] - f.close() + repositories = Repo.fromfile(repolistfile) else: project = alternative_project or store_read_project('.') apiurl = self.get_api_url() - repositories = get_repositories_of_project(apiurl, project) + repositories = list(get_repos_of_project(apiurl, project)) if not len(repositories): raise oscerr.WrongArgs('no repositories defined for project \'%s\'' % project) - try: - f = open(repolistfile, 'w') - f.write('\n'.join(repositories) + '\n') - f.close() - except: - pass - - if not arg_repository and len(repositories): + if alternative_project is None: + # only persist our own 
repos + Repo.tofile(repolistfile, repositories) + + no_repo = False + repo_names = sorted(set([r.name for r in repositories])) + if not arg_repository and repositories: + # XXX: we should avoid hardcoding repository names # Use a default value from config, but just even if it's available - # unless try standard, or openSUSE_Factory - arg_repository = repositories[-1] - for repository in (conf.config['build_repository'], 'standard', 'openSUSE_Factory'): - if repository in repositories: + # unless try standard, or openSUSE_Factory, or openSUSE_Tumbleweed + no_repo = True + arg_repository = repositories[-1].name + for repository in (conf.config['build_repository'], 'standard', 'openSUSE_Factory', 'openSUSE_Tumbleweed'): + if repository in repo_names: arg_repository = repository + no_repo = False break if not arg_repository: raise oscerr.WrongArgs('please specify a repository') - elif noinit == False and not arg_repository in repositories: - raise oscerr.WrongArgs('%s is not a valid repository, use one of: %s' % (arg_repository, ', '.join(repositories))) + if not noinit: + if not arg_repository in repo_names: + raise oscerr.WrongArgs('%s is not a valid repository, use one of: %s' % (arg_repository, ', '.join(repo_names))) + arches = [r.arch for r in repositories if r.name == arg_repository and r.arch] + if arches and not arg_arch in arches: + raise oscerr.WrongArgs('%s is not a valid arch for the repository %s, use one of: %s' % (arg_arch, arg_repository, ', '.join(arches))) # can be implemented using # reduce(lambda x, y: x + y, (glob.glob(x) for x in ('*.spec', '*.dsc', '*.kiwi'))) # but be a bit more readable :) - descr = glob.glob('*.spec') + glob.glob('*.dsc') + glob.glob('*.kiwi') + glob.glob('PKGBUILD') - + descr = glob.glob('*.spec') + glob.glob('*.dsc') + glob.glob('*.kiwi') + glob.glob('*.livebuild') + glob.glob('PKGBUILD') + glob.glob('build.collax') + # FIXME: # * request repos from server and select by build type. 
if not arg_descr and len(descr) == 1: @@ -5500,24 +5902,41 @@ def parse_repoarchdescr(self, args, noinit = False, alternative_project = None, elif not arg_descr: msg = None if len(descr) > 1: - # guess/prefer build descrs like the following: - # -. > . - # no guessing for arch's PKGBUILD files (the backend does not do any guessing, too) + if no_repo: + raise oscerr.WrongArgs("Repository is missing. Cannot guess build description without repository") + apiurl = self.get_api_url() + project = store_read_project('.') + bc = get_buildconfig(apiurl, project, arg_repository) + with tempfile.NamedTemporaryFile() as f: + f.write(bc) + f.flush() + recipe = return_external('/usr/lib/build/queryconfig', '--dist', f.name, 'type') + recipe = recipe.strip() + if recipe == 'arch': + recipe = 'PKGBUILD' pac = os.path.basename(os.getcwd()) if is_package_dir(os.getcwd()): pac = store_read_package(os.getcwd()) - extensions = ['spec', 'dsc', 'kiwi'] - cands = [i for i in descr for ext in extensions if i == '%s-%s.%s' % (pac, arg_repository, ext)] + if multibuild_package: + pac = multibuild_package + if recipe == 'PKGBUILD': + cands = [d for d in descr if d.startswith(recipe)] + else: + cands = [d for d in descr if d.endswith('.' 
+ recipe)] + if len(cands) > 1: + repo_cands = [d for d in cands if d == '%s-%s.%s' % (pac, arg_repository, recipe)] + if repo_cands: + cands = repo_cands + else: + pac_cands = [d for d in cands if d == '%s.%s' % (pac, recipe)] + if pac_cands: + cands = pac_cands if len(cands) == 1: arg_descr = cands[0] - else: - cands = [i for i in descr for ext in extensions if i == '%s.%s' % (pac, ext)] - if len(cands) == 1: - arg_descr = cands[0] if not arg_descr: - msg = 'Multiple build description files found: %s' % ', '.join(descr) + msg = 'Multiple build description files found: %s' % ', '.join(cands) elif not ignore_descr: - msg = 'Missing argument: build description (spec, dsc or kiwi file)' + msg = 'Missing argument: build description (spec, dsc, kiwi or livebuild file)' try: p = Package('.') if p.islink() and not p.isexpanded(): @@ -5553,19 +5972,23 @@ def parse_repoarchdescr(self, args, noinit = False, alternative_project = None, @cmdln.option('--nochecks', '--no-checks', action='store_true', help='Do not run build checks on the resulting packages.') @cmdln.option('--no-verify', '--noverify', action='store_true', - help='Skip signature verification of packages used for build. (Global config in .oscrc: no_verify)') + help='Skip signature verification (via pgp keys) of packages used for build. 
(Global config in .oscrc: no_verify)') @cmdln.option('--noservice', '--no-service', action='store_true', help='Skip run of local source services as specified in _service file.') @cmdln.option('-p', '--prefer-pkgs', metavar='DIR', action='append', help='Prefer packages from this directory when installing the build-root') @cmdln.option('-k', '--keep-pkgs', metavar='DIR', help='Save built packages into this directory') + @cmdln.option('-M', '--multibuild-package', metavar='MPAC', + help='Build the specified multibuild package') @cmdln.option('-x', '--extra-pkgs', metavar='PAC', action='append', help='Add this package when installing the build-root') @cmdln.option('--root', metavar='ROOT', help='Build in specified directory') @cmdln.option('-j', '--jobs', metavar='N', help='Compile with N jobs') + @cmdln.option('-t', '--threads', metavar='N', + help='Compile with N threads') @cmdln.option('--icecream', metavar='N', help='use N parallel build jobs with icecream') @cmdln.option('--ccache', action='store_true', @@ -5587,8 +6010,12 @@ def parse_repoarchdescr(self, args, noinit = False, alternative_project = None, help='build a package which does not exist on the server') @cmdln.option('--linksources', action='store_true', help='use hard links instead of a deep copied source') + @cmdln.option('--vm-memory', metavar='TYPE', + help='use given MB for VM') @cmdln.option('--vm-type', metavar='TYPE', help='use VM type TYPE (e.g. 
kvm)') + @cmdln.option('--vm-telnet', metavar='TELNET', + help='Launch a telnet server inside of VM build') @cmdln.option('--target', metavar='TARGET', help='define target platform') @cmdln.option('--alternative-project', metavar='PROJECT', @@ -5613,6 +6040,10 @@ def parse_repoarchdescr(self, args, noinit = False, alternative_project = None, help=SUPPRESS_HELP) @cmdln.option('--host', metavar='HOST', help='perform the build on a remote server - user@server:~/remote/directory') + @cmdln.option('--trust-all-projects', action='store_true', + help='trust packages from all projects') + @cmdln.option('--nopreinstallimage', '--no-preinstallimage', action='store_true', + help='Do not use preinstall images for creating the build root.') def do_build(self, subcmd, opts, *args): """${cmd_name}: Build a package on your local machine @@ -5657,10 +6088,9 @@ def do_build(self, subcmd, opts, *args): import osc.build - if not os.path.exists('/usr/lib/build/debtransform') \ - and not os.path.exists('/usr/lib/lbuild/debtransform'): - sys.stderr.write('Error: you need build.rpm with version 2007.3.12 or newer.\n') - sys.stderr.write('See http://download.opensuse.org/repositories/openSUSE:/Tools/\n') + if which(conf.config['build-cmd']) is None: + print('Error: build (\'%s\') command not found' % conf.config['build-cmd'], file=sys.stderr) + print('Install the build package from http://download.opensuse.org/repositories/openSUSE:/Tools/', file=sys.stderr) return 1 if opts.debuginfo and opts.disable_debuginfo: @@ -5669,7 +6099,7 @@ def do_build(self, subcmd, opts, *args): if len(args) > 3: raise oscerr.WrongArgs('Too many arguments') - args = self.parse_repoarchdescr(args, opts.noinit or opts.offline, opts.alternative_project, False, opts.vm_type) + args = self.parse_repoarchdescr(args, opts.noinit or opts.offline, opts.alternative_project, False, opts.vm_type, opts.multibuild_package) # check for source services r = None @@ -5680,7 +6110,7 @@ def do_build(self, subcmd, opts, *args): 
except: print("WARNING: package is not existing on server yet") opts.local_package = True - + if opts.offline or opts.local_package or r == None: print("WARNING: source service from package or project will not be executed. This may not be the same build as on server!") elif (conf.config['local_service_run'] and not opts.noservice) and not opts.noinit: @@ -5704,12 +6134,12 @@ def do_build(self, subcmd, opts, *args): if not os.path.isdir(d): raise oscerr.WrongOptions('Preferred package location \'%s\' is not a directory' % d) - if opts.noinit and opts.offline: - raise oscerr.WrongOptions('--noinit and --offline are mutually exclusive') - if opts.offline and opts.preload: raise oscerr.WrongOptions('--offline and --preload are mutually exclusive') + if opts.preload: + opts.nopreinstallimage = True + print('Building %s for %s/%s' % (args[2], args[0], args[1])) if not opts.host: return osc.build.main(self.get_api_url(), opts, args) @@ -5720,7 +6150,7 @@ def _do_rbuild(self, subcmd, opts, *args): # drop the --argument, value tuple from the list def drop_arg2(lst, name): - if not name: + if not name: return lst while name in lst: i = lst.index(name) @@ -5745,7 +6175,7 @@ def rsync_dirs_2host(hostargs, short_name, long_name, dirs): hostprefer = os.path.join( hostpath, basename, - "%s__" % (long_name.replace('-','_')), + "%s__" % (long_name.replace('-', '_')), os.path.basename(os.path.abspath(pdir))) hostargs.append(long_name) hostargs.append(hostprefer) @@ -5759,7 +6189,7 @@ def rsync_dirs_2host(hostargs, short_name, long_name, dirs): return ret return 0 - + cwd = os.getcwd() basename = os.path.basename(cwd) @@ -5815,7 +6245,7 @@ def rsync_dirs_2host(hostargs, short_name, long_name, dirs): return ret for arg, long_name in ((opts.rsyncsrc, '--rsync-src'), (opts.overlay, '--overlay')): - if not arg: + if not arg: continue ret = rsync_dirs_2host(hostargs, None, long_name, (arg, )) if ret != 0: @@ -5857,15 +6287,21 @@ def rsync_dirs_2host(hostargs, short_name, long_name, dirs): 
help='specify the used build target project') @cmdln.option('--noinit', '--no-init', action='store_true', help='do not guess/verify specified repository') - @cmdln.option('-r', '--root', action='store_true', + @cmdln.option('-r', '--login-as-root', action='store_true', help='login as root instead of abuild') + @cmdln.option('--root', metavar='ROOT', + help='Path to the buildroot') @cmdln.option('-o', '--offline', action='store_true', help='Use cached data without contacting the api server') + @cmdln.option('--wipe', action='store_true', + help='Delete the build root instead of chrooting into it') + @cmdln.option('-f', '--force', action='store_true', + help='Do not ask confirmation for wipe') def do_chroot(self, subcmd, opts, *args): - """${cmd_name}: chroot into the buildchroot + """${cmd_name}: opens a shell inside of the build root - chroot into the buildchroot for the given repository, arch and build description - (NOTE: this command does not work if "build-type" is set in the config) + chroot into the build root for the given repository, arch and build description + (NOTE: this command does not work if a VM is used) usage: osc chroot [OPTS] REPOSITORY ARCH BUILD_DESCR @@ -5877,26 +6313,54 @@ def do_chroot(self, subcmd, opts, *args): """ if len(args) > 3: raise oscerr.WrongArgs('Too many arguments') - if conf.config['build-type']: + if conf.config['build-type'] and conf.config['build-type'] != "lxc": print('Not implemented for VMs', file=sys.stderr) sys.exit(1) user = 'abuild' - if opts.root: + if opts.login_as_root: user = 'root' - repository, arch, descr = self.parse_repoarchdescr(args, opts.noinit or opts.offline, opts.alternative_project) - project = opts.alternative_project or store_read_project('.') - if opts.local_package: - package = os.path.splitext(descr)[0] - else: - package = store_read_package('.') - apihost = urlsplit(self.get_api_url())[1] - buildroot = os.environ.get('OSC_BUILD_ROOT', conf.config['build-root']) \ - % {'repo': repository, 'arch': 
arch, 'project': project, 'package': package, 'apihost': apihost} + buildroot = opts.root + if buildroot is None: + repository, arch, descr = self.parse_repoarchdescr(args, opts.noinit or opts.offline, opts.alternative_project) + project = opts.alternative_project or store_read_project('.') + if opts.local_package: + package = os.path.splitext(os.path.basename(descr))[0] + else: + package = store_read_package('.') + apihost = urlsplit(self.get_api_url())[1] + if buildroot is None: + buildroot = os.environ.get('OSC_BUILD_ROOT', conf.config['build-root']) \ + % {'repo': repository, 'arch': arch, 'project': project, 'package': package, 'apihost': apihost} if not os.path.isdir(buildroot): raise oscerr.OscIOError(None, '\'%s\' is not a directory' % buildroot) suwrapper = os.environ.get('OSC_SU_WRAPPER', conf.config['su-wrapper']) + + # Wipe build root if --wipe was given + if opts.wipe: + sucmd = suwrapper.split() + cmd = [ conf.config['build-cmd'], '--root='+buildroot, '--wipe' ] + if sucmd[0] == 'su': + if sucmd[-1] == '-c': + sucmd.pop() + cmd = sucmd + ['-s', cmd[0], 'root', '--' ] + cmd[1:] + else: + cmd = sucmd + cmd + + if opts.force: + sys.exit(run_external(cmd[0], *cmd[1:])) + else: + # Confirm delete + print("Really wipe '%s'? 
[y/N]: " % buildroot) + choice = raw_input().lower() + if choice == 'y': + sys.exit(run_external(cmd[0], *cmd[1:])) + else: + print('Aborting') + sys.exit(0) + + # Normal chroot sucmd = suwrapper.split()[0] suargs = ' '.join(suwrapper.split()[1:]) if suwrapper.startswith('su '): @@ -5911,6 +6375,10 @@ def do_chroot(self, subcmd, opts, *args): @cmdln.option('', '--csv', action='store_true', help='generate output in CSV (separated by |)') + @cmdln.option('-l', '--limit', metavar='limit', + help='for setting the number of results') + @cmdln.option('-M', '--multibuild-package', metavar= 'MPAC', + help='Show the buildhistory of the specified multibuild package') @cmdln.alias('buildhist') def do_buildhistory(self, subcmd, opts, *args): """${cmd_name}: Shows the build history of a package @@ -5924,6 +6392,8 @@ def do_buildhistory(self, subcmd, opts, *args): ${cmd_option_list} """ + args = slash_split(args) + if len(args) < 2 and is_package_dir('.'): self.print_repos() @@ -5943,16 +6413,21 @@ def do_buildhistory(self, subcmd, opts, *args): else: raise oscerr.WrongArgs('Wrong number of arguments') + if opts.multibuild_package: + package = package + ":" + opts.multibuild_package + format = 'text' if opts.csv: format = 'csv' - print('\n'.join(get_buildhistory(apiurl, project, package, repository, arch, format))) + print('\n'.join(get_buildhistory(apiurl, project, package, repository, arch, format, opts.limit))) @cmdln.option('', '--csv', action='store_true', help='generate output in CSV (separated by |)') @cmdln.option('-l', '--limit', metavar='limit', help='for setting the number of results') + @cmdln.option('-M', '--multibuild-package', metavar='MPAC', + help='get jobhistory for the specified multibuild package') @cmdln.alias('jobhist') def do_jobhistory(self, subcmd, opts, *args): """${cmd_name}: Shows the job history of a project @@ -5995,6 +6470,9 @@ def do_jobhistory(self, subcmd, opts, *args): else: raise oscerr.WrongArgs('Wrong number of arguments') + if 
opts.multibuild_package and package is not None: + package = package + ":" + opts.multibuild_package + format = 'text' if opts.csv: format = 'csv' @@ -6003,8 +6481,16 @@ def do_jobhistory(self, subcmd, opts, *args): @cmdln.hide(1) def do_rlog(self, subcmd, opts, *args): - print("Command rlog is obsolete. Please use 'osc log'") - sys.exit(1) + """${cmd_name}: + + Obsolete command to show commit logs. Use 'log' now. + + See the help output of 'log'. + """ + + print("This command is obsolete. Use 'osc log'.", file=sys.stderr) + print("See 'osc help log'.", file=sys.stderr) + return 2 @cmdln.option('-r', '--revision', metavar='rev', @@ -6074,18 +6560,24 @@ def do_service(self, subcmd, opts, *args): usage: osc service COMMAND (inside working copy) osc service run [SOURCE_SERVICE] + osc service localrun osc service disabledrun osc service remoterun [PROJECT PACKAGE] + osc service merge [PROJECT PACKAGE] + osc service wait [PROJECT PACKAGE] COMMAND can be: - run r run defined services locally, it takes an optional parameter to run only a + run r run defined services locally, it takes an optional parameter to run only a specified source service. In case parameters exist for this one in _service file they are used. 
- disabledrun dr run disabled or server side only services locally and store files as local created + runall ra run all services independent of the used mode remoterun rr trigger a re-run on the server side + merge commits all server side generated files and drops the _service definition + wait waits until the service finishes and returns with an error if it failed ${cmd_option_list} """ + # disabledrun and localrun exist as well, but are considered to be obsolete args = slash_split(args) project = package = singleservice = mode = None @@ -6101,7 +6593,7 @@ do_service(self, subcmd, opts, *args): raise oscerr.WrongArgs('Too few arguments.') if len(args) == 2: singleservice = args[1] - elif len(args) == 3 and args[0] in ('remoterun', 'rr'): + elif len(args) == 3 and args[0] in ('remoterun', 'rr', 'merge', 'wait'): project = args[1] package = args[2] else: @@ -6109,14 +6601,22 @@ def do_service(self, subcmd, opts, *args): command = args[0] - if not (command in ( 'run', 'localrun', 'disabledrun', 'remoterun', 'lr', 'dr', 'r', 'rr' )): + if not (command in ( 'runall', 'ra', 'run', 'localrun', 'disabledrun', 'remoterun', 'lr', 'dr', 'r', 'rr', 'merge', 'wait' )): raise oscerr.WrongArgs('Wrong command given.') if command == "remoterun" or command == "rr": print(runservice(apiurl, project, package)) return - if command in ('run', 'localrun', 'disabledrun', 'lr', 'dr', 'r'): + if command == "wait": + print(waitservice(apiurl, project, package)) + return + + if command == "merge": + print(mergeservice(apiurl, project, package)) + return + + if command in ('runall', 'ra', 'run', 'localrun', 'disabledrun', 'lr', 'dr', 'r'): if not is_package_dir(os.curdir): raise oscerr.WrongArgs('Local directory is no package') p = Package(".") @@ -6124,8 +6624,10 @@ mode = "local" elif command == "disabledrun" or command == "dr": mode = "disabled" + elif command == "runall" or command == "ra": + mode = "all" - p.run_source_services(mode, 
singleservice) + return p.run_source_services(mode, singleservice) @cmdln.option('-a', '--arch', metavar='ARCH', help='trigger rebuilds for a specific architecture') @@ -6133,6 +6635,8 @@ def do_service(self, subcmd, opts, *args): help='trigger rebuilds for a specific repository') @cmdln.option('-f', '--failed', action='store_true', help='rebuild all failed packages') + @cmdln.option('-M', '--multibuild-package', action='append', + help='rebuild specified multibuild package') @cmdln.option('--all', action='store_true', help='Rebuild all packages of entire project') @cmdln.alias('rebuildpac') @@ -6189,7 +6693,15 @@ def do_rebuild(self, subcmd, opts, *args): if not (opts.all or package or repo or arch or code): raise oscerr.WrongOptions('No option has been provided. If you want to rebuild all packages of the entire project, use --all option.') - print(rebuild(apiurl, project, package, repo, arch, code)) + packages = [] + if opts.multibuild_package: + for subpackage in opts.multibuild_package: + packages.append(package + ":" + subpackage) + else: + packages.append(package) + + for package in packages: + print(rebuild(apiurl, project, package, repo, arch, code)) def do_info(self, subcmd, opts, *args): @@ -6211,6 +6723,8 @@ def do_info(self, subcmd, opts, *args): @cmdln.option('-a', '--arch', metavar='ARCH', help='Restart builds for a specific architecture') + @cmdln.option('-M', '--multibuild-package', action='append', + help='Restart builds for specified multibuild package') @cmdln.option('-r', '--repo', metavar='REPO', help='Restart builds for a specific repository') @cmdln.option('--all', action='store_true', @@ -6257,11 +6771,21 @@ def do_restartbuild(self, subcmd, opts, *args): if not (opts.all or package or repo or arch): raise oscerr.WrongOptions('No option has been provided. 
If you want to restart all packages of the entire project, use --all option.') - print(cmdbuild(apiurl, subcmd, project, package, opts.arch, opts.repo)) + packages = [] + if opts.multibuild_package: + for subpackage in opts.multibuild_package: + packages.append(package + ":" + subpackage) + else: + packages.append(package) + + for package in packages: + print(cmdbuild(apiurl, subcmd, project, package, arch, repo)) @cmdln.option('-a', '--arch', metavar='ARCH', help='Delete all binary packages for a specific architecture') + @cmdln.option('-M', '--multibuild-package', action='append', + help='Delete all binary packages for specified multibuild package') @cmdln.option('-r', '--repo', metavar='REPO', help='Delete all binary packages for a specific repository') @cmdln.option('--build-disabled', action='store_true', @@ -6274,6 +6798,7 @@ def do_restartbuild(self, subcmd, opts, *args): help='Delete all binaries of packages which have dependency errors') @cmdln.option('--all', action='store_true', help='Delete all binaries regardless of the package status (previously default)') + @cmdln.alias("unpublish") def do_wipebinaries(self, subcmd, opts, *args): """${cmd_name}: Delete all binary packages of a certain project/package @@ -6283,6 +6808,8 @@ def do_wipebinaries(self, subcmd, opts, *args): usage: osc wipebinaries OPTS # works in checked out project dir osc wipebinaries OPTS PROJECT [PACKAGE] + osc unpublish OPTS # works in checked out project dir + osc unpublish OPTS PROJECT [PACKAGE] ${cmd_option_list} """ @@ -6325,15 +6852,28 @@ def do_wipebinaries(self, subcmd, opts, *args): if len(codes) == 0: raise oscerr.WrongOptions('No option has been provided. 
If you want to delete all binaries, use --all option.') - # make a new request for each code= parameter - for code in codes: - print(wipebinaries(apiurl, project, package, opts.arch, opts.repo, code)) + packages = [] + if opts.multibuild_package: + for subpackage in opts.multibuild_package: + packages.append(package + ":" + subpackage) + else: + packages.append(package) + + # make a new request for each code= parameter and for each package in packages + for package in packages: + for code in codes: + if subcmd == 'unpublish': + print(unpublish(apiurl, project, package, opts.arch, opts.repo, code)) + else: + print(wipebinaries(apiurl, project, package, opts.arch, opts.repo, code)) @cmdln.option('-q', '--quiet', action='store_true', help='do not show downloading progress') @cmdln.option('-d', '--destdir', default='./binaries', metavar='DIR', help='destination directory') + @cmdln.option('-M', '--multibuild-package', action='append', + help='get binaries from specified multibuild package') @cmdln.option('--sources', action="store_true", help='also fetch source packages') @cmdln.option('--debug', action="store_true", @@ -6348,6 +6888,7 @@ def do_getbinaries(self, subcmd, opts, *args): usage: osc getbinaries REPOSITORY # works in checked out project/package (check out all archs in subdirs) osc getbinaries REPOSITORY ARCHITECTURE # works in checked out project/package + osc getbinaries PROJECT REPOSITORY ARCHITECTURE osc getbinaries PROJECT PACKAGE REPOSITORY ARCHITECTURE osc getbinaries PROJECT PACKAGE REPOSITORY ARCHITECTURE FILE ${cmd_option_list} @@ -6371,6 +6912,8 @@ def do_getbinaries(self, subcmd, opts, *args): architecture = args[3] if len(args) == 5: binary = args[4] + elif len(args) == 3: + project, repository, architecture = args elif len(args) >= 1 and len(args) <= 2: if is_package_dir(os.getcwd()): project = store_read_project(os.curdir) @@ -6384,7 +6927,7 @@ def do_getbinaries(self, subcmd, opts, *args): if len(args) == 2: architecture = args[1] else: - 
raise oscerr.WrongArgs('Need either 1, 2 or 4 arguments') + raise oscerr.WrongArgs('Need either 1, 2, 3 or 4 arguments') repos = list(get_repos_of_project(apiurl, project)) if not [i for i in repos if repository == i.name]: @@ -6394,10 +6937,17 @@ def do_getbinaries(self, subcmd, opts, *args): if architecture is None: arches = [i.arch for i in repos if repository == i.name] + if package is None: package = meta_get_packagelist(apiurl, project) - else: - package = [package] + else: + if opts.multibuild_package: + packages = [] + for subpackage in opts.multibuild_package: + packages.append(package + ":" + subpackage) + package = packages + else: + package = [package] # Set binary target directory and create if not existing target_dir = os.path.normpath(opts.destdir) @@ -6418,7 +6968,7 @@ def do_getbinaries(self, subcmd, opts, *args): if binary != None and binary != i.name: continue # skip source rpms - if not opts.sources and i.name.endswith('src.rpm'): + if not opts.sources and (i.name.endswith('src.rpm') or i.name.endswith('sdeb')): continue if not opts.debug: if i.name.find('-debuginfo-') >= 0: @@ -6468,7 +7018,7 @@ def do_my(self, subcmd, opts, *args): osc ${cmd_name} rq # list requests, excluding project 'foo' and 'bar' osc ${cmd_name} rq --exclude-project foo,bar - # list submitrequests I made + # list requests I made osc ${cmd_name} sr ${cmd_usage} @@ -6479,7 +7029,7 @@ def do_my(self, subcmd, opts, *args): """ # TODO: please clarify the difference between sr and rq. - # My first implementeation was to make no difference between requests FROM one + # My first implementeation was to make no difference between requests FROM one # of my projects and TO one of my projects. The current implementation appears to make this difference. # The usage above indicates, that sr would be a subset of rq, which is no the case with my tests. # jw. 
@@ -6522,7 +7072,7 @@ def do_my(self, subcmd, opts, *args): elif type in args_prj: what = {'project': ''} elif type in args_sr: - requests = get_request_list(apiurl, req_who=user, exclude_target_projects=exclude_projects) + requests = get_request_collection(apiurl, 'creator', req_who=user) for r in sorted(requests): print(r.list_view(), '\n') return @@ -6573,11 +7123,11 @@ def do_my(self, subcmd, opts, *args): requests = [] # open reviews u = makeurl(apiurl, ['request'], { - 'view' : 'collection', + 'view': 'collection', 'states': 'review', 'reviewstates': 'new', 'roles': 'reviewer', - 'user' : user, + 'user': user, }) f = http_GET(u) root = ET.parse(f).getroot() @@ -6590,10 +7140,10 @@ def do_my(self, subcmd, opts, *args): print("") # open requests u = makeurl(apiurl, ['request'], { - 'view' : 'collection', + 'view': 'collection', 'states': 'new', 'roles': 'maintainer', - 'user' : user, + 'user': user, }) f = http_GET(u) root = ET.parse(f).getroot() @@ -6606,10 +7156,10 @@ def do_my(self, subcmd, opts, *args): print("") # declined requests submitted by me u = makeurl(apiurl, ['request'], { - 'view' : 'collection', + 'view': 'collection', 'states': 'declined', 'roles': 'creator', - 'user' : user, + 'user': user, }) f = http_GET(u) root = ET.parse(f).getroot() @@ -6622,9 +7172,9 @@ def do_my(self, subcmd, opts, *args): print("") return except HTTPError as e: - if e.code == 400: - # skip it ... try again with old style below - pass + if e.code != 400: + raise e + # skip it ... 
try again with old style below res = get_user_projpkgs(apiurl, user, role_filter, exclude_projects, 'project' in what, 'package' in what, @@ -6634,18 +7184,19 @@ def do_my(self, subcmd, opts, *args): # if list of packages is empty user is maintainer of the whole project request_todo = {} + dummy_elm = ET.Element('dummy') roles = {} if len(what.keys()) == 2: - for i in res.get('project_id', res.get('project', {})).findall('project'): + for i in res.get('project_id', res.get('project', dummy_elm)).findall('project'): request_todo[i.get('name')] = [] roles[i.get('name')] = [p.get('role') for p in i.findall('person') if p.get('userid') == user] - for i in res.get('package_id', res.get('package', {})).findall('package'): + for i in res.get('package_id', res.get('package', dummy_elm)).findall('package'): prj = i.get('project') roles['/'.join([prj, i.get('name')])] = [p.get('role') for p in i.findall('person') if p.get('userid') == user] if not prj in request_todo or request_todo[prj] != []: request_todo.setdefault(prj, []).append(i.get('name')) else: - for i in res.get('project_id', res.get('project', {})).findall('project'): + for i in res.get('project_id', res.get('project', dummy_elm)).findall('project'): roles[i.get('name')] = [p.get('role') for p in i.findall('person') if p.get('userid') == user] if list_requests: @@ -6686,10 +7237,10 @@ def do_my(self, subcmd, opts, *args): help='match only when given attribute exists in meta data') @cmdln.option('-v', '--verbose', action='store_true', help='show more information') - @cmdln.option('-V', '--version', action='store_true', + @cmdln.option('-V', '--version', action='store_true', help='show package version, revision, and srcmd5. 
CAUTION: This is slow and unreliable') @cmdln.option('-i', '--involved', action='store_true', - help='show projects/packages where given person (or myself) is involved as bugowner or maintainer') + help='show projects/packages where given person (or myself) is involved as bugowner or maintainer [[{group|person}/]] default: person') @cmdln.option('-b', '--bugowner', action='store_true', help='as -i, but only bugowner') @cmdln.option('-m', '--maintainer', action='store_true', @@ -6780,14 +7331,25 @@ def build_xpath(attr, what, substr = False): # role filter role_filter = '' if opts.bugowner or opts.maintainer or opts.involved: - xpath = xpath_join(xpath, 'person/@userid = \'%s\'' % search_term, inner=True) - role_filter = '%s (%s)' % (search_term, 'person') + tmp = search_term.split(':') + if len(tmp) > 1: + search_type, search_term = [tmp[0], tmp[1]] + else: + search_type = 'person' + search_dict = { 'person' : 'userid', + 'group' : 'groupid' } + try: + search_id = search_dict[ search_type ] + except KeyError: + search_type, search_id = [ 'person', 'userid' ] + xpath = xpath_join(xpath, '%s/@%s = \'%s\'' % (search_type, search_id, search_term), inner=True) + role_filter = '%s (%s)' % (search_term, search_type) role_filter_xpath = xpath if opts.bugowner and not opts.maintainer: - xpath = xpath_join(xpath, 'person/@role=\'bugowner\'', op='and') + xpath = xpath_join(xpath, '%s/@role=\'bugowner\'' % search_type, op='and') role_filter = 'bugowner' elif not opts.bugowner and opts.maintainer: - xpath = xpath_join(xpath, 'person/@role=\'maintainer\'', op='and') + xpath = xpath_join(xpath, '%s/@role=\'maintainer\'' % search_type, op='and') role_filter = 'maintainer' if opts.limit_to_attribute: xpath = xpath_join(xpath, 'attribute/@name=\'%s\'' % opts.limit_to_attribute, op='and') @@ -6879,7 +7441,7 @@ def build_xpath(attr, what, substr = False): continue # construct a sorted, flat list # Sort by first column, follwed by second column if we have two columns, else sort by 
first. - results.sort(lambda x, y: ( cmp(x[0], y[0]) or + results.sort(lambda x, y: ( cmp(x[0], y[0]) or (len(x)>1 and len(y)>1 and cmp(x[1], y[1])) )) new = [] for i in results: @@ -7044,6 +7606,8 @@ def do_importsrcpkg(self, subcmd, opts, srpm): @cmdln.option('-X', '-m', '--method', default='GET', metavar='HTTP_METHOD', help='specify HTTP method to use (GET|PUT|DELETE|POST)') + @cmdln.option('-e', '--edit', default=None, action='store_true', + help='GET, edit and PUT the location') @cmdln.option('-d', '--data', default=None, metavar='STRING', help='specify string data for e.g. POST') @cmdln.option('-T', '-f', '--file', default=None, metavar='FILE', @@ -7064,6 +7628,7 @@ def do_api(self, subcmd, opts, url): Examples: osc api /source/home:user osc api -X PUT -T /etc/fstab source/home:user/test5/myfstab + osc api -e /configuration ${cmd_usage} ${cmd_option_list} @@ -7091,10 +7656,17 @@ def do_api(self, subcmd, opts, url): data=opts.data, file=opts.file, headers=opts.headers) - out = r.read() - sys.stdout.write(out) + if opts.edit: + text = edit_text(out) + r = http_request("PUT", + url, + data=text, + headers=opts.headers) + out = r.read() + + sys.stdout.write(out) @cmdln.option('-b', '--bugowner-only', action='store_true', @@ -7141,7 +7713,7 @@ def do_maintainer(self, subcmd, opts, *args): osc maintainer osc maintainer PRJ osc maintainer PRJ PKG - + The tool looks up the default responsible person for a certain project or package. When using with an OBS 2.4 (or later) server it is doing the lookup for a given binary according to the server side configuration of default owners. 
@@ -7189,7 +7761,7 @@ def setBugownerHelper(apiurl, project, package, bugowner): roles = [ 'bugowner', 'maintainer' ] if len(opts.role): roles = opts.role - if opts.bugowner_only or opts.bugowner or subcmd == 'bugowner': + elif opts.bugowner_only or opts.bugowner or subcmd == 'bugowner': roles = [ 'bugowner' ] args = slash_split(args) @@ -7213,7 +7785,7 @@ def setBugownerHelper(apiurl, project, package, bugowner): apiurl = self.get_api_url() - # Try the OBS 2.4 way first. + # Try the OBS 2.4 way first. if binary or opts.user or opts.group: limit = None if opts.all: @@ -7224,19 +7796,24 @@ def setBugownerHelper(apiurl, project, package, bugowner): filterroles = None if binary: searchresult = owner(apiurl, binary, "binary", usefilter=filterroles, devel=None, limit=limit) - if not searchresult and (opts.set_bugowner or opts.set_bugowner_request): - # filtered search did not succeed, but maybe we want to set an owner initially? - searchresult = owner(apiurl, binary, "binary", usefilter="", devel=None, limit=-1) - if searchresult: - print("WARNING: the binary exists, but has no matching maintainership roles defined.") - print("Do you want to set it in the container where the binary appeared first?") - result = searchresult.find('owner') - print("This is: " + result.get('project'), end=' ') - if result.get('package'): - print (" / " + result.get('package')) - repl = raw_input('\nUse this container? (y/n) ') - if repl.lower() != 'y': - searchresult = None + if searchresult != None and len(searchresult) == 0: + # We talk to an OBS 2.4 or later understanding the call + if opts.set_bugowner or opts.set_bugowner_request: + # filtered search did not succeed, but maybe we want to set an owner initially? 
+ searchresult = owner(apiurl, binary, "binary", usefilter="", devel=None, limit=-1) + if searchresult: + print("WARNING: the binary exists, but has no matching maintainership roles defined.") + print("Do you want to set it in the container where the binary appeared first?") + result = searchresult.find('owner') + print("This is: " + result.get('project'), end=' ') + if result.get('package'): + print (" / " + result.get('package')) + repl = raw_input('\nUse this container? (y/n) ') + if repl.lower() != 'y': + searchresult = None + else: + print("Empty search result, you may want to search with other or all roles via -r ''") + return elif opts.user: searchresult = owner(apiurl, opts.user, "user", usefilter=filterroles, devel=None) elif opts.group: @@ -7361,7 +7938,7 @@ def setBugownerHelper(apiurl, project, package, bugowner): else: print("Defined in project: ", definingproject) - if prj: + if prj: # not for user/group search for role in roles: if opts.bugowner and not len(maintainers.get(role, [])): @@ -7417,21 +7994,29 @@ def do_whois(self, subcmd, opts, *usernames): @cmdln.option('-r', '--revision', metavar='rev', help='print out the specified revision') @cmdln.option('-e', '--expand', action='store_true', - help='force expansion of linked packages.') + help='(default) force expansion of linked packages.') @cmdln.option('-u', '--unexpand', action='store_true', help='always work with unexpanded packages.') @cmdln.option('-M', '--meta', action='store_true', help='list meta data files') + @cmdln.alias('blame') @cmdln.alias('less') def do_cat(self, subcmd, opts, *args): """${cmd_name}: Output the content of a file to standard output Examples: + osc cat file osc cat project package file osc cat project/package/file osc cat http://api.opensuse.org/build/.../_log osc cat http://api.opensuse.org/source/../_link + osc less file + osc less project package file + + osc blame file + osc blame project package file + ${cmd_usage} ${cmd_option_list} """ @@ -7444,43 +8029,39 @@ 
def do_cat(self, subcmd, opts, *args): opts.file = None return self.do_api('list', opts, *args) - - args = slash_split(args) - if len(args) != 3: + project = package = filename = None + if len(args) == 3: + project = args[0] + package = args[1] + filename = args[2] + elif len(args) == 1 and is_package_dir(os.getcwd()): + project = store_read_project(os.curdir) + package = store_read_package(os.curdir) + filename = args[0] + else: raise oscerr.WrongArgs('Wrong number of arguments.') + rev, dummy = parseRevisionOption(opts.revision) apiurl = self.get_api_url() query = { } + if subcmd == 'blame': + query['view'] = "blame" if opts.meta: query['meta'] = 1 if opts.revision: query['rev'] = opts.revision - if opts.expand: - query['rev'] = show_upstream_srcmd5(apiurl, args[0], args[1], expand=True, revision=opts.revision, meta=opts.meta) - u = makeurl(apiurl, ['source', args[0], args[1], args[2]], query=query) - try: - if subcmd == 'less': - f = http_GET(u) - run_pager(''.join(f.readlines())) - else: - for data in streamfile(u): - sys.stdout.write(data) - except HTTPError as e: - if e.code == 404 and not opts.expand and not opts.unexpand: - print('expanding link...', file=sys.stderr) - query['rev'] = show_upstream_srcmd5(apiurl, args[0], args[1], expand=True, revision=opts.revision) - u = makeurl(apiurl, ['source', args[0], args[1], args[2]], query=query) - if subcmd == "less": - f = http_GET(u) - run_pager(''.join(f.readlines())) - else: - for data in streamfile(u): - sys.stdout.write(data) - else: - e.osc_msg = 'If linked, try: cat -e' - raise e + if not opts.unexpand: + query['rev'] = show_upstream_srcmd5(apiurl, project, package, expand=True, revision=opts.revision, meta=opts.meta) + query['expand'] = 1 # important for blame case to follow links in old revisions + u = makeurl(apiurl, ['source', project, package, filename], query=query) + if subcmd == 'less': + f = http_GET(u) + run_pager(''.join(f.readlines())) + else: + for data in streamfile(u): + 
sys.stdout.write(data) # helper function to download a file from a specific revision @@ -7546,9 +8127,8 @@ def do_repairlink(self, subcmd, opts, *args): raise oscerr.APIError('source link is not broken') workingrev = None - baserev = linkinfo.get('baserev') - if baserev != None: - query = { 'rev': 'latest', 'linkrev': baserev } + if linkinfo.get('baserev'): + query = { 'rev': 'latest', 'linkrev': 'base' } u = makeurl(apiurl, ['source', prj, package], query=query) f = http_GET(u) root = ET.parse(f).getroot() @@ -7951,39 +8531,35 @@ def do_vc(self, subcmd, opts, *args): except IndexError: pass + cmd_list = [conf.config['vc-cmd']] if meego_style: if not os.path.exists('/usr/bin/vc'): print('Error: you need meego-packaging-tools for /usr/bin/vc command', file=sys.stderr) return 1 cmd_list = ['/usr/bin/vc'] - else: - if not os.path.exists('/usr/lib/build/vc'): - print('Error: you need build.rpm with version 2009.04.17 or newer', file=sys.stderr) - print('See http://download.opensuse.org/repositories/openSUSE:/Tools/', file=sys.stderr) - return 1 - - cmd_list = ['/usr/lib/build/vc'] - - # set user's email if no mailaddr exists - if 'mailaddr' not in os.environ: + elif which(cmd_list[0]) is None: + print('Error: vc (\'%s\') command not found' % cmd_list[0], file=sys.stderr) + print('Install the build package from http://download.opensuse.org/repositories/openSUSE:/Tools/', file=sys.stderr) + return 1 - if len(args) and is_package_dir(args[0]): - apiurl = store_read_apiurl(args[0]) - else: - apiurl = self.get_api_url() + if args and is_package_dir(args[0]): + apiurl = store_read_apiurl(args[0]) + else: + apiurl = self.get_api_url() + # set user's email if the mailaddr env variable is not set + if 'mailaddr' in os.environ: + pass + elif 'email' in conf.config['api_host_options'][apiurl]: + os.environ['mailaddr'] = conf.config['api_host_options'][apiurl]['email'] + else: user = conf.get_apiurl_usr(apiurl) - data = get_user_data(apiurl, user, 'email') if data: 
os.environ['mailaddr'] = data[0] else: print('Try env mailaddr=...', file=sys.stderr) - # mailaddr can be overrided by config one - if 'email' in conf.config['api_host_options'][apiurl]: - os.environ['mailaddr'] = conf.config['api_host_options'][apiurl]['email'] - if meego_style: if opts.message or opts.just_edit: print('Warning: to edit MeeGo style changelog, opts will be ignored.', file=sys.stderr) @@ -8016,7 +8592,7 @@ def do_mv(self, subcmd, opts, source, dest): """ if not os.path.isfile(source): - raise oscerr.WrongArgs("Source file '%s' does not exists or is no file" % source) + raise oscerr.WrongArgs("Source file '%s' does not exist or is not a file" % source) if not opts.force and os.path.isfile(dest): raise oscerr.WrongArgs("Dest file '%s' already exists" % dest) if os.path.isdir(dest): @@ -8230,6 +8806,73 @@ def do_clean(self, subcmd, opts, *args): if not opts.dry_run: os.unlink(os.path.join(p.absdir, filename)) + @cmdln.option('-c', '--comment', + help='comment text', metavar='COMMENT') + @cmdln.option('-p', '--parent', + help='reply to comment with parent id', metavar='PARENT') + def do_comment(self, subcmd, opts, *args): + """${cmd_name}: List / create / delete comments + + On create: + If -p is given a reply to the ID is created. Otherwise + a toplevel comment is created. 
+ If -c is not given the default editor will be opened and + you can type your comment + + usage: + osc comment list package PROJECT PACKAGE + osc comment list project PROJECT + osc comment list request REQUEST_ID + + osc comment create [-p PARENT_ID] [-c COMMENT] package PROJECT PACKAGE + osc comment create [-p PARENT_ID] [-c COMMENT] project PROJECT + osc comment create [-p PARENT_ID] [-c COMMENT] request REQUEST_ID + + osc comment delete ID + + """ + + comment = None + args = slash_split(args) + apiurl = self.get_api_url() + + if len(args) < 2: + raise oscerr.WrongArgs('Incorrect number of arguments.\n\n' \ + + self.get_cmd_help('comment')) + + cmds = ['list', 'create', 'delete'] + if args[0] not in cmds: + raise oscerr.WrongArgs('Unknown comment action %s. Choose one of %s.' \ + % (args[0], ', '.join(cmds))) + + comment_targets = ['package', 'project', 'request'] + if args[0] != 'delete' and args[1] not in comment_targets: + raise oscerr.WrongArgs('Unknown comment target %s. Choose one of %s.' 
\ + % (args[1], ', '.join(comment_targets))) + + if args[1] == 'package' and len(args) != 4: + raise oscerr.WrongArgs('Please use PROJECT PACKAGE') + elif args[1] == 'project' and len(args) != 3: + raise oscerr.WrongArgs('Please use PROJECT') + elif args[1] == 'request' and len(args) != 3: + raise oscerr.WrongArgs('Please use REQUEST') + elif args[0] == 'delete' and len(args) != 2: + raise oscerr.WrongArgs('Please use COMMENT_ID') + if not opts.comment and args[0] == 'create': + comment = edit_text() + else: + comment = opts.comment + + if args[0] == 'list': + print_comments(apiurl, args[1], *args[2:]) + elif args[0] == 'create': + result = create_comment(apiurl, args[1], comment, + *args[2:], parent=opts.parent) + print(result) + elif args[0] == 'delete': + result = delete_comment(apiurl, args[1]) + print(result) + def _load_plugins(self): plugin_dirs = [ '/usr/lib/osc-plugins', @@ -8239,6 +8882,7 @@ def _load_plugins(self): for plugin_dir in plugin_dirs: if not os.path.isdir(plugin_dir): continue + sys.path.append(plugin_dir) for extfile in os.listdir(plugin_dir): if not extfile.endswith('.py'): continue diff --git a/osc/conf.py b/osc/conf.py index 1ff8c08..e49014d 100644 --- a/osc/conf.py +++ b/osc/conf.py @@ -41,6 +41,8 @@ import os import re import sys +import ssl +import warnings try: from http.cookiejar import LWPCookieJar, CookieJar @@ -49,15 +51,15 @@ from urllib.parse import urlsplit from urllib.error import URLError from urllib.request import HTTPBasicAuthHandler, HTTPCookieProcessor, HTTPPasswordMgrWithDefaultRealm, ProxyHandler - from urllib.request import AbstractHTTPHandler, build_opener, proxy_bypass + from urllib.request import AbstractHTTPHandler, build_opener, proxy_bypass, HTTPSHandler except ImportError: #python 2.x from cookielib import LWPCookieJar, CookieJar from httplib import HTTPConnection, HTTPResponse from StringIO import StringIO from urlparse import urlsplit - from urllib2 import URLError, HTTPBasicAuthHandler, HTTPCookieProcessor, 
HTTPPasswordMgrWithDefaultRealm, ProxyHandler - from urllib2 import AbstractHTTPHandler, build_opener, proxy_bypass + from urllib2 import URLError, HTTPBasicAuthHandler, HTTPCookieProcessor, HTTPPasswordMgrWithDefaultRealm, ProxyHandler, AbstractBasicAuthHandler + from urllib2 import AbstractHTTPHandler, build_opener, proxy_bypass, HTTPSHandler from . import OscConfigParser from osc import oscerr @@ -112,6 +114,9 @@ def _get_processors(): 'build-vmdisk-rootsize': '', # optional for VM builds 'build-vmdisk-swapsize': '', # optional for VM builds 'build-vmdisk-filesystem': '', # optional for VM builds + 'build-vm-user': '', # optional for VM builds + 'build-kernel': '', # optional for VM builds + 'build-initrd': '', # optional for VM builds 'build-jobs': _get_processors(), 'builtin_signature_check': '1', # by default use builtin check for verify pkgs @@ -124,6 +129,7 @@ def _get_processors(): 'http_full_debug': '0', 'http_retries': '3', 'verbose': '1', + 'no_preinstallimage': '0', 'traceback': '0', 'post_mortem': '0', 'use_keyring': '0', @@ -158,6 +164,7 @@ def _get_processors(): # what to do with the source package if the submitrequest has been accepted 'submitrequest_on_accept_action': '', 'request_show_interactive': '0', + 'request_show_source_buildstatus': '0', # if a review is accepted in interactive mode and a group # was specified the review will be accepted for this group 'review_inherit_group': '0', @@ -172,6 +179,8 @@ def _get_processors(): 'maintenance_attribute': 'OBS:MaintenanceProject', 'maintained_update_project_attribute': 'OBS:UpdateProject', 'show_download_progress': '0', + # path to the vc script + 'vc-cmd': '/usr/lib/build/vc' } # being global to this module, this dict can be accessed from outside @@ -180,8 +189,8 @@ def _get_processors(): boolean_opts = ['debug', 'do_package_tracking', 'http_debug', 'post_mortem', 'traceback', 'check_filelist', 'plaintext_passwd', 'checkout_no_colon', 'checkout_rooted', 'check_for_request_on_action', 
'linkcontrol', 'show_download_progress', 'request_show_interactive', - 'review_inherit_group', 'use_keyring', 'gnome_keyring', 'no_verify', 'builtin_signature_check', 'http_full_debug', - 'include_request_from_project', 'local_service_run', 'buildlog_strip_time'] + 'request_show_source_buildstatus', 'review_inherit_group', 'use_keyring', 'gnome_keyring', 'no_verify', 'builtin_signature_check', + 'http_full_debug', 'include_request_from_project', 'local_service_run', 'buildlog_strip_time', 'no_preinstallimage'] api_host_options = ['user', 'pass', 'passx', 'aliases', 'http_headers', 'email', 'sslcertck', 'cafile', 'capath', 'trusted_prj'] @@ -225,6 +234,12 @@ def _get_processors(): # e.g. /var/tmp/FILE.swap #build-swap = /var/tmp/FILE.swap +# build-kernel is the boot kernel used for VM builds +#build-kernel = /boot/vmlinuz + +# build-initrd is the boot initrd used for VM builds +#build-initrd = /boot/initrd + # build-memory is the amount of memory used in the VM # value in MB - e.g. 512 #build-memory = 512 @@ -372,17 +387,18 @@ def _get_processors(): def parse_apisrv_url(scheme, apisrv): if apisrv.startswith('http://') or apisrv.startswith('https://'): - return urlsplit(apisrv)[0:2] + url = apisrv elif scheme != None: - # the split/join is needed to get a proper url (e.g. 
without a trailing slash) - return urlsplit(urljoin(scheme, apisrv))[0:2] + url = scheme + apisrv else: msg = 'invalid apiurl \'%s\' (specify the protocol (http:// or https://))' % apisrv raise URLError(msg) + scheme, url, path = urlsplit(url)[0:3] + return scheme, url, path.rstrip('/') -def urljoin(scheme, apisrv): - return '://'.join([scheme, apisrv]) +def urljoin(scheme, apisrv, path=''): + return '://'.join([scheme, apisrv]) + path def is_known_apiurl(url): @@ -391,6 +407,21 @@ def is_known_apiurl(url): return apiurl in config['api_host_options'] +def extract_known_apiurl(url): + """ + Return longest prefix of given url that is known apiurl, + None if there is no known apiurl that is prefix of given url. + """ + scheme, host, path = parse_apisrv_url(None, url) + p = path.split('/') + while p: + apiurl = urljoin(scheme, host, '/'.join(p)) + if apiurl in config['api_host_options']: + return apiurl + p.pop() + return None + + def get_apiurl_api_host_options(apiurl): """ Returns all apihost specific options for the given apiurl, None if @@ -431,10 +462,9 @@ def get_apiurl_usr(apiurl): # So we need to build a new opener everytime we switch the # apiurl (because different apiurls may have different # cafile/capath locations) -def _build_opener(url): +def _build_opener(apiurl): from osc.core import __version__ global config - apiurl = urljoin(*parse_apisrv_url(None, url)) if 'last_opener' not in _build_opener.__dict__: _build_opener.last_opener = (None, None) if apiurl == _build_opener.last_opener[0]: @@ -450,38 +480,33 @@ def _build_opener(url): # workaround for http://bugs.python.org/issue9639 authhandler_class = HTTPBasicAuthHandler - if sys.version_info >= (2, 6, 6) and sys.version_info < (2, 7, 1) \ - and not 'reset_retry_count' in dir(HTTPBasicAuthHandler): - print('warning: your urllib2 version seems to be broken. 
' \ - 'Using a workaround for http://bugs.python.org/issue9639', file=sys.stderr) - + if sys.version_info >= (2, 6, 6) and sys.version_info < (2, 7, 9): class OscHTTPBasicAuthHandler(HTTPBasicAuthHandler): - def http_error_401(self, *args): - response = HTTPBasicAuthHandler.http_error_401(self, *args) - self.retried = 0 - return response + # The following two functions were backported from upstream 2.7. + def http_error_auth_reqed(self, authreq, host, req, headers): + authreq = headers.get(authreq, None) + + if authreq: + mo = AbstractBasicAuthHandler.rx.search(authreq) + if mo: + scheme, quote, realm = mo.groups() + if quote not in ['"', "'"]: + warnings.warn("Basic Auth Realm was unquoted", + UserWarning, 2) + if scheme.lower() == 'basic': + return self.retry_http_basic_auth(host, req, realm) - def http_error_404(self, *args): - self.retried = 0 - return None - - authhandler_class = OscHTTPBasicAuthHandler - elif sys.version_info >= (2, 6, 6) and sys.version_info < (2, 7, 99): - class OscHTTPBasicAuthHandler(HTTPBasicAuthHandler): - def http_error_404(self, *args): - self.reset_retry_count() - return None - - authhandler_class = OscHTTPBasicAuthHandler - elif sys.version_info >= (2, 6, 5) and sys.version_info < (2, 6, 6): - # workaround for broken urllib2 in python 2.6.5: wrong credentials - # lead to an infinite recursion - class OscHTTPBasicAuthHandler(HTTPBasicAuthHandler): def retry_http_basic_auth(self, host, req, realm): - # don't retry if auth failed - if req.get_header(self.auth_header, None) is not None: + user, pw = self.passwd.find_user_password(realm, host) + if pw is not None: + raw = "%s:%s" % (user, pw) + auth = 'Basic %s' % base64.b64encode(raw).strip() + if req.get_header(self.auth_header, None) == auth: + return None + req.add_unredirected_header(self.auth_header, auth) + return self.parent.open(req, timeout=req.timeout) + else: return None - return HTTPBasicAuthHandler.retry_http_basic_auth(self, host, req, realm) authhandler_class = 
OscHTTPBasicAuthHandler @@ -511,20 +536,27 @@ def retry_http_basic_auth(self, host, req, realm): capath = i break if not cafile and not capath: - raise Exception('No CA certificates found') + raise oscerr.OscIOError(None, 'No CA certificates found') ctx = oscssl.mySSLContext() if ctx.load_verify_locations(capath=capath, cafile=cafile) != 1: - raise Exception('No CA certificates found') + raise oscerr.OscIOError(None, 'No CA certificates found') opener = m2urllib2.build_opener(ctx, oscssl.myHTTPSHandler(ssl_context=ctx, appname='osc'), HTTPCookieProcessor(cookiejar), authhandler, proxyhandler) else: + handlers = [HTTPCookieProcessor(cookiejar), authhandler, proxyhandler] + try: + # disable ssl cert check in python >= 2.7.9 + ctx = ssl._create_unverified_context() + handlers.append(HTTPSHandler(context=ctx)) + except AttributeError: + pass print("WARNING: SSL certificate checks disabled. Connection is insecure!\n", file=sys.stderr) - opener = build_opener(HTTPCookieProcessor(cookiejar), authhandler, proxyhandler) + opener = build_opener(*handlers) opener.addheaders = [('User-agent', 'osc/%s' % __version__)] _build_opener.last_opener = (apiurl, opener) return opener -def init_basicauth(config): +def init_basicauth(config, config_mtime): """initialize urllib2 with the credentials for Basic Authentication""" def filterhdrs(meth, ishdr, *hdrs): @@ -577,6 +609,9 @@ def urllib2_debug_init(self, debuglevel=0): cookiejar = LWPCookieJar(cookie_file) try: cookiejar.load(ignore_discard=True) + if int(round(config_mtime)) > int(os.stat(cookie_file).st_mtime): + cookiejar.clear() + cookiejar.save() except IOError: try: fd = os.open(cookie_file, os.O_CREAT | os.O_WRONLY | os.O_TRUNC, 0o600) @@ -634,18 +669,18 @@ def config_set_option(section, opt, val=None, delete=False, update=True, **kwarg general_opts = [i for i in DEFAULTS.keys() if not i in ['user', 'pass', 'passx']] if section != 'general': section = config['apiurl_aliases'].get(section, section) - scheme, host = \ + scheme, 
host, path = \ parse_apisrv_url(config.get('scheme', 'https'), section) - section = urljoin(scheme, host) + section = urljoin(scheme, host, path) sections = {} for url in cp.sections(): if url == 'general': sections[url] = url else: - scheme, host = \ + scheme, host, path = \ parse_apisrv_url(config.get('scheme', 'https'), url) - apiurl = urljoin(scheme, host) + apiurl = urljoin(scheme, host, path) sections[apiurl] = url section = sections.get(section.rstrip('/'), section) @@ -692,19 +727,20 @@ def write_initial_config(conffile, entries, custom_template=''): config.update(entries) # at this point use_keyring and gnome_keyring are str objects if config['use_keyring'] == '1' and GENERIC_KEYRING: - protocol, host = \ + protocol, host, path = \ parse_apisrv_url(None, config['apiurl']) keyring.set_password(host, config['user'], config['pass']) config['pass'] = '' config['passx'] = '' elif config['gnome_keyring'] == '1' and GNOME_KEYRING: - protocol, host = \ + protocol, host, path = \ parse_apisrv_url(None, config['apiurl']) gnomekeyring.set_network_password_sync( user=config['user'], password=config['pass'], protocol=protocol, - server=host) + server=host, + object=path) config['user'] = '' config['pass'] = '' config['passx'] = '' @@ -731,19 +767,20 @@ def add_section(filename, url, user, passwd): # Section might have existed, but was empty pass if config['use_keyring'] and GENERIC_KEYRING: - protocol, host = parse_apisrv_url(None, url) + protocol, host, path = parse_apisrv_url(None, url) keyring.set_password(host, user, passwd) cp.set(url, 'keyring', '1') cp.set(url, 'user', user) cp.remove_option(url, 'pass') cp.remove_option(url, 'passx') elif config['gnome_keyring'] and GNOME_KEYRING: - protocol, host = parse_apisrv_url(None, url) + protocol, host, path = parse_apisrv_url(None, url) gnomekeyring.set_network_password_sync( user=user, password=passwd, protocol=protocol, - server=host) + server=host, + object=path) cp.set(url, 'keyring', '1') cp.remove_option(url, 
'pass') cp.remove_option(url, 'passx') @@ -826,8 +863,8 @@ def get_config(override_conffile=None, aliases = {} for url in [x for x in cp.sections() if x != 'general']: # backward compatiblity - scheme, host = parse_apisrv_url(config.get('scheme', 'https'), url) - apiurl = urljoin(scheme, host) + scheme, host, path = parse_apisrv_url(config.get('scheme', 'https'), url) + apiurl = urljoin(scheme, host, path) user = None password = None if config['use_keyring'] and GENERIC_KEYRING: @@ -841,7 +878,7 @@ def get_config(override_conffile=None, elif config['gnome_keyring'] and GNOME_KEYRING: # Read from gnome keyring if available try: - gk_data = gnomekeyring.find_network_password_sync(protocol=scheme, server=host) + gk_data = gnomekeyring.find_network_password_sync(protocol=scheme, server=host, object=path) if not 'user' in gk_data[0]: raise oscerr.ConfigError('no user found in keyring', conffile) user = gk_data[0]['user'] @@ -923,6 +960,8 @@ def get_config(override_conffile=None, api_host_options[apiurl][key] = cp.getboolean(url, key) else: api_host_options[apiurl][key] = cp.get(url, key) + if cp.has_option(url, 'build-root', proper=True): + api_host_options[apiurl]['build-root'] = cp.get(url, 'build-root', raw=True) if not 'sslcertck' in api_host_options[apiurl]: api_host_options[apiurl]['sslcertck'] = True @@ -986,7 +1025,7 @@ def get_config(override_conffile=None, raise e # finally, initialize urllib2 for to use the credentials for Basic Authentication - init_basicauth(config) + init_basicauth(config, os.stat(conffile).st_mtime) # vim: sw=4 et diff --git a/osc/core.py b/osc/core.py index a6ee49c..95faa3f 100644 --- a/osc/core.py +++ b/osc/core.py @@ -5,7 +5,7 @@ from __future__ import print_function -__version__ = '0.145git' +__version__ = '0.157.2' # __store_version__ is to be incremented when the format of the working copy # "store" changes in an incompatible way. Please add any needed migration @@ -81,7 +81,7 @@ - +