*.box
# files generated by build
FDroidServer.egg-info/
-# generated docs files
-docs/fdroid.aux
-docs/fdroid.cp
-docs/fdroid.cps
-docs/fdroid.fn
-docs/fdroid.info
-docs/fdroid.ky
-docs/fdroid.log
-docs/fdroid.pg
-docs/fdroid.toc
-docs/fdroid.tp
-docs/fdroid.vr
-
F-Droid is an installable catalogue of FOSS (Free and Open Source Software)
applications for the Android platform. The client makes it easy to browse,
install, and keep track of updates on your device.
For documentation, please see the docs directory.
Alternatively, visit http://f-droid.org/manual/
-
else
SUFFIX=''
fi
- tar jxvf /vagrant/cache/android-ndk-r9-linux-x86$SUFFIX.tar.bz2
- tar jxvf /vagrant/cache/android-ndk-r9-linux-x86$SUFFIX-legacy-toolchains.tar.bz2
- mv android-ndk-r9 #{ndk_loc}
+ tar jxvf /vagrant/cache/android-ndk-r9b-linux-x86$SUFFIX.tar.bz2
+ tar jxvf /vagrant/cache/android-ndk-r9b-linux-x86$SUFFIX-legacy-toolchains.tar.bz2
+ mv android-ndk-r9b #{ndk_loc}
"
not_if do
File.exists?("#{ndk_loc}")
not_if "grep ANDROID_HOME /home/#{user}/.bsenv"
end
-%w{18.1.1}.each do |ver|
-
- script "add_build_tools_#{ver}" do
- interpreter "bash"
- user user
- cwd "/tmp"
- code "
- if [ -f /vagrant/cache/build-tools/#{ver}.tar.gz ] ; then
- echo Installing from cache
- mkdir #{sdk_loc}/build-tools
- tar -C #{sdk_loc}/build-tools -z -x -f /vagrant/cache/build-tools/#{ver}.tar.gz
- else
- #{sdk_loc}/tools/android update sdk --no-ui -a -t build-tools-#{ver} <<X
+script "add_build_tools" do
+ interpreter "bash"
+ user user
+ ver = "19.0.1"
+ cwd "/tmp"
+ code "
+ if [ -f /vagrant/cache/build-tools/#{ver}.tar.gz ] ; then
+ echo Installing from cache
+ mkdir #{sdk_loc}/build-tools
+ tar -C #{sdk_loc}/build-tools -z -x -f /vagrant/cache/build-tools/#{ver}.tar.gz
+ else
+ #{sdk_loc}/tools/android update sdk --no-ui -a -t build-tools-#{ver} <<X
y
X
- fi
- "
- not_if "test -d #{sdk_loc}/build-tools/#{ver}"
- end
+ fi
+ sed -i '/BTPATH/d' /home/#{user}/.bsenv
+ echo \"export PATH=\\$PATH:#{sdk_loc}/build-tools/#{ver} #BTPATH\" >> /home/#{user}/.bsenv
+ "
+ not_if "test -d #{sdk_loc}/build-tools/#{ver}"
end
-# This is currently 18.0.1
+# This is currently 19.0.1
script "add_platform_tools" do
interpreter "bash"
user user
end
-%w{addon-google_apis-google-7 addon-google_apis-google-10 addon-google_apis-google-15 addon-google_apis-google-16 addon-google_apis-google-17}.each do |sdk|
-
- script "add_addon_#{sdk}" do
- interpreter "bash"
- user user
- cwd "/tmp"
- code "
- if [ -f /vagrant/cache/add-ons/#{sdk}.tar.gz ] ; then
- echo Installing from cache
- tar -C #{sdk_loc}/add-ons -z -x -f /vagrant/cache/add-ons/#{sdk}.tar.gz
- else
- echo Installing via 'android'
- #{sdk_loc}/tools/android update sdk --no-ui -a -t #{sdk} <<X
-y
-
-X
- fi
- "
-
- not_if "test -d #{sdk_loc}/add-ons/#{sdk}"
-
- end
-
-end
-
-
end
end
-script "install-gradle" do
- cwd "/tmp"
- interpreter "bash"
- code "
- unzip /vagrant/cache/gradle-1.8-bin.zip
- mv gradle-1.8 /opt/gradle
- "
- not_if "test -d /opt/gradle"
-end
-
-execute "add-gradle-home" do
- user user
- command "echo \"export GRADLE_HOME=/opt/gradle\" >> /home/#{user}/.bsenv"
- not_if "grep GRADLE_HOME /home/#{user}/.bsenv"
-end
-execute "add-gradle-bin" do
- user user
- command "echo \"export PATH=\\$PATH:/opt/gradle/bin\" >> /home/#{user}/.bsenv"
- not_if "grep gradle/bin /home/#{user}/.bsenv"
-end
execute "add-bsenv" do
user user
command "echo \". ./.bsenv \" >> /home/#{user}/.bashrc"
--- /dev/null
+
+user = node[:settings][:user]
+
+gradle_script = IO.read(File.join(
+ File.expand_path(File.dirname(__FILE__)), "gradle"))
+
+script "add-gradle-bindir" do
+ cwd "/tmp"
+ interpreter "bash"
+ code "mkdir -p /opt/gradle/bin"
+ not_if "test -d /opt/gradle/bin"
+end
+
+script "add-gradle-verdir" do
+ cwd "/tmp"
+ interpreter "bash"
+ code "mkdir -p /opt/gradle/versions"
+ not_if "test -d /opt/gradle/versions"
+end
+
+%w{1.4 1.6 1.7 1.8 1.9}.each do |ver|
+ script "install-gradle-#{ver}" do
+ cwd "/tmp"
+ interpreter "bash"
+ code "
+ unzip /vagrant/cache/gradle-#{ver}-bin.zip
+ mv gradle-#{ver} /opt/gradle/versions/#{ver}
+ "
+ not_if "test -d /opt/gradle/versions/#{ver}"
+ end
+end
+
+script "add-gradle-wrapper" do
+ cwd "/tmp"
+ interpreter "bash"
+ code "
+ cat << \"EOF\" > /opt/gradle/bin/gradle
+#{gradle_script}
+EOF
+ chmod a+x /opt/gradle/bin/gradle
+ "
+end
+
+execute "add-android-ndk-path" do
+ user user
+ command "echo \"export PATH=\\$PATH:/opt/gradle/bin #PATH-GRADLE\" >> /home/#{user}/.bsenv"
+ not_if "grep PATH-GRADLE /home/#{user}/.bsenv"
+end
--- /dev/null
+#!/bin/bash
+
+bindir="$(dirname $0)"
+basedir="$(dirname $bindir)"
+verdir="${basedir}/versions"
+args=("$@")
+pushd "${verdir}" &>/dev/null
+
+v_all=(*/)
+v_all=(${v_all[@]%/})
+
+v_def=${v_all[-1]}
+echo "Available gradle versions: ${v_all[@]}"
+
+popd &>/dev/null
+
+run_gradle() {
+ ${verdir}/${v_found}/bin/gradle "${args[@]}"
+ exit $?
+}
+
+# key-value pairs of what gradle version each gradle plugin version
+# should accept
+d_plugin_k=(0.7 0.6 0.5 0.4 0.3 0.2)
+d_plugin_v=(1.9 1.8 1.6 1.6 1.4 1.4)
+
+# Latest takes priority
+files=(build.gradle)
+
+for f in ${files[@]}; do
+ [[ -f $f ]] || continue
+ while read l; do
+ if [[ $l == *'com.android.tools.build:gradle:'* ]]; then
+ plugin_pver=$(echo -n "$l" | sed "s/.*com.android.tools.build:gradle:\\([0-9\\.\\+]\\+\\).*/\\1/")
+ elif [[ $l == *'gradleVersion'* ]]; then
+ wrapper_ver=$(echo -n "$l" | sed "s/.*gradleVersion[ ]*=[ ]*[\"']\\([0-9\\.]\\+\\)[\"'].*/\\1/")
+ fi
+ done < $f
+done
+
+if [[ -n $wrapper_ver ]]; then
+ v_found=$wrapper_ver
+ echo Found $v_found via gradleVersion
+ run_gradle
+fi
+
+if [[ -n $plugin_pver ]]; then
+ i=0
+ match=false
+ for k in ${d_plugin_k[@]}; do
+ if [[ $plugin_pver == ${k}* ]]; then
+ plugin_ver=${d_plugin_v[$i]}
+ match=true
+ break
+ fi
+ let i++
+ done
+ if $match; then
+ v_found=$plugin_ver
+ echo Found $v_found via gradle plugin version $k
+ fi
+fi
+
+[[ -n $v_found ]] && run_gradle
+
+echo No suitable gradle version found - defaulting to $v_def
+v_found=$v_def
+run_gradle
#!/bin/bash
#
-# fdroid completion support.
+# bash-completion - part of the FDroid server tools
+# Commits updates to apps, allowing you to edit the commit messages
#
-# 'fdroid' is aliased automatically, but aliases to it are not. For instance,
-# to alias 'fd' to 'fdroid' and have competion available:
+# Copyright (C) 2013, 2014 Daniel Martí <mvdan@mvdan.cc>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# 'fdroid' is completed automatically, but aliases to it are not.
+# For instance, to alias 'fd' to 'fdroid' and have completion available:
#
# alias fd='fdroid'
# complete -F _fdroid fd
#
# alias fbuild='fdroid build'
# complete -F _fdroid_build fbuild
-#
-# There are also completion function for '-p com.some.app' aliases:
-#
-# alias fbld='fdroid build -v -l -p'
-# complete -F _fdroid_build_project fbld
-#
-# alias fcheckup='fdroid checkupdates -v -p'
-# complete -F _fdroid_checkupdates_project fcheckup
-#
-# This way, one can simply do 'fbld com.some.app' or 'fcheckup com.some.app'
-
-__fdroid_cwd() {
- if [[ "${PWD##*/}" == metadata ]]; then
- subdir=.
- else
- [ -d metadata ] && subdir=metadata || return 1
- fi
-}
__fdroid_init() {
COMPREPLY=()
}
__package() {
- [ -n "$subdir" ] || __fdroid_cwd
- files=( ${subdir}/*.txt )
- files=( ${files[@]#${subdir}/} )
+ files=( metadata/*.txt )
+ files=( ${files[@]#metadata/} )
files=${files[@]%.txt}
COMPREPLY=( $( compgen -W "$files" -- $cur ) )
}
+__apk_package() {
+ files=( ${1}/*.apk )
+ [ -f "${files[0]}" ] || return
+
+ files=( ${files[@]#*/} )
+ files=${files[@]%_*}
+ COMPREPLY=( $( compgen -W "$files" -- $cur ) )
+}
+
+__apk_vercode() {
+ local p=${cur:0:-1}
+
+ files=( ${1}/${p}_*.apk )
+ [ -f "${files[0]}" ] || return
+
+ files=( ${files[@]#*_} )
+ files=${files[@]%.apk}
+ COMPREPLY=( $( compgen -P "${p}:" -W "$files" -- $cur ) )
+}
+
__vercode() {
- local s p_found p
- s=${#COMP_WORDS[*]}
- p_found=false
-
- $aliased && p=${COMP_WORDS[1]} || {
- for (( i=1; i <= s; i++ )); do
- $p_found && { p=${COMP_WORDS[$i]}; break;}
- [[ ${COMP_WORDS[$i]} == -p ]] || [[ ${COMP_WORDS[$i]} == --package ]] &&\
- p_found=true
- done
- $p_found || return 0
- }
- [ -n "$subdir" ] || __fdroid_cwd
+ local p=${cur:0:-1}
- COMPREPLY=( $( compgen -W "$( while read line; do
+ COMPREPLY=( $( compgen -P "${p}:" -W "$( while read line; do
if [[ "$line" == "Build Version:"* ]]
then
line="${line#*,}"
line="${line#*,}"
printf "${line%%,*} "
fi
- done < "${subdir}/${p}.txt" )" -- $cur ) )
+ done < "metadata/${p}.txt" )" -- $cur ) )
}
__complete_options() {
}
__complete_build() {
- opts="-h -v -p -c -l -s -t -f"
- lopts="--help --verbose --package --vercode --latest --server --resetserver
- --on-server --force --install --all"
- case "${prev}" in
- -p|--package)
- __package
+ opts="-h -v -c -l -s -t -f"
+ lopts="--help --verbose --latest --server --resetserver --on-server
+ --force --all"
+ case "${cur}" in
+ -*)
+ __complete_options
return 0;;
- -c|--vercode)
+ *:)
__vercode
return 0;;
+ *)
+ __package
+ return 0;;
+ esac
+}
+
+__complete_install() {
+ opts="-h -v"
+ lopts="--help --verbose --all"
+ case "${cur}" in
+ -*)
+ __complete_options
+ return 0;;
+ *:)
+ __apk_vercode repo
+ return 0;;
+ *)
+ __apk_package repo
+ return 0;;
esac
- __complete_options
}
__complete_update() {
}
__complete_publish() {
- opts="-h -v -p"
- lopts="--help --verbose --package"
- case "${prev}" in
- -p|--package)
- __package
+ opts="-h -v"
+ lopts="--help --verbose"
+ case "${cur}" in
+ -*)
+ __complete_options
+ return 0;;
+ *:)
+ __apk_vercode unsigned
+ return 0;;
+ *)
+ __apk_package unsigned
return 0;;
esac
- __complete_options
}
__complete_checkupdates() {
- opts="-h -v -p"
- lopts="--help --verbose --package --auto --autoonly --commit --gplay"
- case "${prev}" in
- -p|--package)
+ opts="-h -v"
+ lopts="--help --verbose --auto --autoonly --commit --gplay"
+ case "${cur}" in
+ -*)
+ __complete_options
+ return 0;;
+ *)
__package
return 0;;
esac
- __complete_options
}
__complete_import() {
opts="-h -u -s -r"
lopts="--help --url --subdir --repo"
case "${prev}" in
- -u|--url|-r|--repo) return 0;;
- -s|--subdir)
- _filedir
- return 0;;
+ -u|--url|-r|--repo|-s|--subdir) return 0;;
esac
__complete_options
}
__complete_rewritemeta() {
- opts="-h -v -p"
- lopts="--help --verbose --package"
- case "${prev}" in
- -p|--package)
+ opts="-h -v"
+ lopts="--help --verbose"
+ case "${cur}" in
+ -*)
+ __complete_options
+ return 0;;
+ *)
+ __package
+ return 0;;
+ esac
+}
+
+__complete_lint() {
+ opts="-h -v"
+ lopts="--help --verbose"
+ case "${cur}" in
+ -*)
+ __complete_options
+ return 0;;
+ *)
__package
return 0;;
esac
- __complete_options
}
__complete_scanner() {
- opts="-h -v -p"
- lopts="--help --verbose --package --nosvn"
- case "${prev}" in
- -p|--package)
+ opts="-h -v"
+ lopts="--help --verbose --nosvn"
+ case "${cur}" in
+ -*)
+ __complete_options
+ return 0;;
+ *:)
+ __vercode
+ return 0;;
+ *)
__package
return 0;;
esac
- __complete_options
}
__complete_verify() {
opts="-h -v -p"
- lopts="--help --verbose --package"
- case "${prev}" in
- -p|--package)
+ lopts="--help --verbose"
+ case "${cur}" in
+ -*)
+ __complete_options
+ return 0;;
+ *:)
+ __vercode
+ return 0;;
+ *)
__package
return 0;;
esac
- __complete_options
}
__complete_stats() {
}
_fdroid() {
- local cmd cmds aliased
+ local cmd cmds
cmd=${COMP_WORDS[1]}
- cmds=" build init update publish checkupdates import rewritemeta scanner verify stats server "
- aliased=false
- __fdroid_cwd || return 0
+ cmds=" build init install update publish checkupdates import \
+rewritemeta lint scanner verify stats server "
for c in $cmds; do eval "_fdroid_${c} () {
- local cur prev cmds opts lopts
+ local cur prev opts lopts
__fdroid_init ${c};
}"; done
}
}
-_fdroid_build_project() {
- local cur prev cmds opts lopts aliased
- __fdroid_init
- aliased=true
- (( $COMP_CWORD == 1 )) && prev="-p"
-
- __complete_build
-}
-
-_fdroid_checkupdates_project() {
- local cur prev cmds opts lopts aliased
- __fdroid_init
- aliased=true
- (( $COMP_CWORD == 1 )) && prev="-p"
-
- __complete_checkupdates
-}
-
_fd-commit() {
- __fdroid_cwd || return 0
__package
}
sdk_path = "/home/vagrant/android-sdk"
ndk_path = "/home/vagrant/android-ndk"
-build_tools = "18.1.1"
+build_tools = "19.0.1"
mvn3 = "mvn"
gradle = "gradle"
-gradle_plugin = "0.6.+"
-manual/
+/manual/
+# generated docs files
+/fdroid.aux
+/fdroid.cp
+/fdroid.cps
+/fdroid.fn
+/fdroid.info
+/fdroid.ky
+/fdroid.log
+/fdroid.pg
+/fdroid.toc
+/fdroid.tp
+/fdroid.vr
\input texinfo @c -*-texinfo-*-
@c %**start of header
@setfilename fdroid.info
+@documentencoding UTF-8
@settitle F-Droid Server Manual
@c %**end of header
@item
Python 2.x
@item
-The Android SDK Tools and Build-tools.
-Note that F-Droid does not assume that you have the Android SDK in your
-@code{PATH}: these directories will be specified in your repository
-configuration. Recent revisions of the SDK have @code{aapt} located in
-android-sdk/build-tools/ and it may be necessary to make a symlink to it in
+The Android SDK Tools and Build-tools.
+Note that F-Droid does not assume that you have the Android SDK in your
+@code{PATH}: these directories will be specified in your repository
+configuration. Recent revisions of the SDK have @code{aapt} located in
+android-sdk/build-tools/ and it may be necessary to make a symlink to it in
android-sdk/platform-tools/
@end itemize
@itemize @bullet
@item
-All SDK platforms requested by the apps you want to build
-(The Android SDK is made available by Google under a proprietary license but
-within that, the SDK platforms, support library and some other components are
-under the Apache license and source code is provided.
-Google APIs, used for building apps using Google Maps, are free to the extent
-that the library comes pre-installed on the device.
-Google Play Services, Google Admob and others are proprietary and shouldn't be
-included in the main F-Droid repository.)
-@item
-A version of the Android NDK
-@item
-Ant
-@item
-Ant Contrib Tasks (Debian package ant-contrib)
-@item
-Maven (Debian package maven)
-@item
-JavaCC (Debian package javacc)
-@item
JDK (Debian package openjdk-6-jdk): openjdk-6 is recommended though openjdk-7
should work too
@item
VCS clients: svn, git, git-svn, hg, bzr
@item
-Miscellaneous packages listed in
-buildserver/cookbooks/fdroidbuild-general/recipes/default.rb
-of the F-Droid server repository
-@item
A keystore for holding release keys. (Safe, secure and well backed up!)
@end itemize
@item
Ruby (debian packages ruby and rubygems)
@item
-Vagrant (gem install vagrant)
+Vagrant (unpackaged). Be sure to use 1.3.x because 1.4.x is completely broken
+(at the time of writing, the forthcoming 1.4.3 might work)
@item
Paramiko (debian package python-paramiko)
@item
Magic (debian package python-magic)
@end itemize
+On the other hand, if you want to build the apps directly on your system
+without the 'Build Server' system, you may need:
+
+@itemize @bullet
+@item
+All SDK platforms requested by the apps you want to build
+(The Android SDK is made available by Google under a proprietary license but
+within that, the SDK platforms, support library and some other components are
+under the Apache license and source code is provided.
+Google APIs, used for building apps using Google Maps, are free to the extent
+that the library comes pre-installed on the device.
+Google Play Services, Google Admob and others are proprietary and shouldn't be
+included in the main F-Droid repository.)
+@item
+A version of the Android NDK
+@item
+Ant with Contrib Tasks (Debian packages ant and ant-contrib)
+@item
+Maven (Debian package maven)
+@item
+JavaCC (Debian package javacc)
+@item
+Miscellaneous packages listed in
+buildserver/cookbooks/fdroidbuild-general/recipes/default.rb
+of the F-Droid server repository
+@end itemize
@node Setup
@chapter Setup
following:
@example
-./fdroid build --package=org.fdroid.fdroid --vercode 16
+./fdroid build org.fdroid.fdroid:16
@end example
This attempts to build version code 16 (which is version 0.25) of the F-Droid
-client. Many of the tools recognise this @code{--package} parameter, allowing
-their activity to be limited to just a single package.
+client. Many of the tools recognise arguments as packages, allowing their
+activity to be limited to just a specific set of packages.
If the build above was successful, two files will have been placed in the
@code{unsigned} directory:
@code{unsigned} after the build, but with the risk of forgetting to do so!
Along similar lines (and only in conjunction with @code{--test}, you can use
-@code{--force} to force a build of a Disabled application, where normally it
-would be completely ignored. Similarly a version that was found to contain
-ELFs or known non-free libraries can be forced to build. See also —
+@code{--force} to force a build of a Disabled application, where normally it
+would be completely ignored. Similarly a version that was found to contain
+ELFs or known non-free libraries can be forced to build. See also —
scanignore= and scandelete= in the Build Version section.
-If the build was unsuccessful, you can find out why by looking at the output
-in the logs/ directory. If that isn't illuminating, try building the app the
+If the build was unsuccessful, you can find out why by looking at the output
+in the logs/ directory. If that isn't illuminating, try building the app the
regular way, step by step: android update project, ndk-build, ant debug.
-Note that source code repositories often contain prebuilt libraries. If the
-app is being considered for the main F-Droid repository, it is important that
-all such prebuilts are built either via the metadata or by a reputable third
+Note that source code repositories often contain prebuilt libraries. If the
+app is being considered for the main F-Droid repository, it is important that
+all such prebuilts are built either via the metadata or by a reputable third
party.
@section Direct Installation
-You can also build and install directly to a connected device or emulator using
-the @code{--install} switch. If you do this without using @code{--package} and
-@code{--vercode} then all versions of all packages will be installed (with each
-individual version overwriting the previous!). In most cases, this will not be
-what you want to do, so execution will stop straight away. However, you can
-override this if you're sure that's what you want, by using @code{--all}.
-Note that currently, no sanity checks are performed with this mode, so that if
-the version is incorrect or that if the package name is different, you won't
-be informed.
+You can also build and install directly to a connected device or emulator
+using the @code{fdroid install} command. If you do this without passing
+packages as arguments then all the latest built and signed version available
+of each package will be installed. In most cases, this will not be what you
+want to do, so execution will stop straight away. However, you can override
+this if you're sure that's what you want, by using @code{--all}. Note that
+currently, no sanity checks are performed with this mode, so if the files in
+the signed output directory were modified, you won't be notified.
@node Importing Applications
* Categories::
* License::
* Name::
+* Provides::
* Auto Name::
* Web Site::
* Source Code::
@cindex license
-The overall license for the application, or in certain cases, for the
-source code only.
+The overall license for the application, or in certain cases, for the
+source code only.
-Common values:
+Common values:
@itemize @bullet
@item
@samp{GPL}
-An unspecified GPL version. Use this only as a last resort or if there is
-some confusion over compatiblity of component licenses: particularly the use of
+An unspecified GPL version. Use this only as a last resort or if there is
+some confusion over compatibility of component licenses: particularly the use of
Apache libraries with GPLv2 source code.
@item
@cindex Name
-The name of the application. Normally, this field should not be present since
-the application's correct name is retrieved from the APK file. However, in a
-situation where an APK contains a bad or missing application name, it can be
-overridden using this. Note that this only overrides the name in the list of
+The name of the application. Normally, this field should not be present since
+the application's correct name is retrieved from the APK file. However, in a
+situation where an APK contains a bad or missing application name, it can be
+overridden using this. Note that this only overrides the name in the list of
apps presented in the client; it doesn't changed the name or application label
in the source code.
+@node Provides
+@section Provides
+
+@cindex Provides
+
+Comma-separated list of application IDs that this app provides. In other
+words, if the user has any of these apps installed, F-Droid will show this app
+as installed instead. It will also appear if the user clicks on URLs linking
+to the other app IDs. Useful when an app switches package name, or when you
+want an app to act as multiple apps.
+
@node Web Site
@section Web Site
@cindex Summary
-A brief summary of what the application is. Since the summary is only allowed
-one line on the list of the F-Droid client, keeping it to within 32 characters
+A brief summary of what the application is. Since the summary is only allowed
+one line on the list of the F-Droid client, keeping it to within 32 characters
will ensure it fits even on the smallest screens.
@node Description
@cindex Description
-A full description of the application, relevant to the latest version.
-This can span multiple lines (which should be kept to a maximum of 80
+A full description of the application, relevant to the latest version.
+This can span multiple lines (which should be kept to a maximum of 80
characters), and is terminated by a line containing a single '.'.
Basic MediaWiki-style formatting can be used. Leaving a blank line starts a
a new line, and numbered lists are the same but using @code{#}. There is
currently no support for nesting lists - you can have one level only.
-It can be helpful to note information pertaining to updating from an
-earlier version; whether the app contains any prebuilts built by the
-upstream developers or whether non-free elements were removed; whether the
-app is in rapid development or whether the latest version lags behind the
-current version; whether the app supports multiple architectures or whether
+It can be helpful to note information pertaining to updating from an
+earlier version; whether the app contains any prebuilts built by the
+upstream developers or whether non-free elements were removed; whether the
+app is in rapid development or whether the latest version lags behind the
+current version; whether the app supports multiple architectures or whether
there is a maximum SDK specified (such info not being recorded in the index).
This is converted to (@code{<desc>}) in the public index file.
The purpose of this feature is to allow non-buildable releases (e.g. the source
is not published) to be flagged, so the scripts don't generate repeated
messages about them. (And also to record the information for review later).
-If an apk has already been built, disabling causes it to be deleted once
-@code{fdroid update} is run; this is the procedure if ever a version has to
+If an apk has already been built, disabling causes it to be deleted once
+@code{fdroid update} is run; this is the procedure if ever a version has to
be replaced.
@item subdir=<path>
try enabling this option.
@item target=<target>
-Specifies a particular SDK target for compilation, overriding the
-project.properties of the app and possibly sub-projects. Note that this does
-not change the target SDK in the AndroidManifest.xml — the level of features
-that can be included in the build. This is likely to cause the whole build.xml
-to be rewritten, which is fine if it's a 'standard' android file or doesn't
-already exist, but not a good idea if it's heavily customised. If you get an
-error about invalid target, first try @code{init=rm -rf bin/}; otherwise this
+Specifies a particular SDK target for compilation, overriding the
+project.properties of the app and possibly sub-projects. Note that this does
+not change the target SDK in the AndroidManifest.xml — the level of features
+that can be included in the build. This is likely to cause the whole build.xml
+to be rewritten, which is fine if it's a 'standard' android file or doesn't
+already exist, but not a good idea if it's heavily customised. If you get an
+error about invalid target, first try @code{init=rm -rf bin/}; otherwise this
parameter should do the trick.
Please note that gradle builds should be using compilesdk=.
with the version name for the build as specified in the metadata.
This is useful for cases when upstream repo failed to update it for
-specific tag; to build an arbitrary revision; to make it apparent that
-the version differs significantly from upstream; or to make it apparent
+specific tag; to build an arbitrary revision; to make it apparent that
+the version differs significantly from upstream; or to make it apparent
which architecture or platform the apk is designed to run on.
@item forcevercode=yes
of the project. Separate items with semicolons.
@item srclibs=a@@r;b@@r1;
-Specifies a list of source libraries or Android projects. Separate items with
-semicolons, and each item is of the form name@@rev where name is the predefined
-source library name and rev is the revision or tag in source control to use.
-
-Each srclib has a metadata file under srclibs/ in the repository directory,
-and the source code is stored in build/srclib/.
-Repo Type: and Repo: are specified in the same way as for apps; Subdir: can be
-a comma separated list, for when directories are renamed by upstream; Update
-Project: updates the projects in the working directory and one level down;
-Prepare: can be used for any kind of preparation: in particular if you need to
-update the project with a particular target. You can then also use $$name$$ in
-the init/prebuild/build command to substitute the relative path to the library
+Specifies a list of source libraries or Android projects. Separate items with
+semicolons, and each item is of the form name@@rev where name is the predefined
+source library name and rev is the revision or tag in source control to use.
+
+Each srclib has a metadata file under srclibs/ in the repository directory,
+and the source code is stored in build/srclib/.
+Repo Type: and Repo: are specified in the same way as for apps; Subdir: can be
+a comma separated list, for when directories are renamed by upstream; Update
+Project: updates the projects in the working directory and one level down;
+Prepare: can be used for any kind of preparation: in particular if you need to
+update the project with a particular target. You can then also use $$name$$ in
+the init/prebuild/build command to substitute the relative path to the library
directory, but it could need tweaking if you've changed into another directory.
@item patch=x
the @code{srclib} directory for details of this.
You can use $$SDK$$, $$NDK$$ and $$MVN3$$ to substitute the paths to the
-android SDK and NDK directories, and maven 3 executable respectively e.g.
+android SDK and NDK directories, and maven 3 executable respectively e.g.
for when you need to run @code{android update project} explicitly.
@item scanignore=path1;path2;...
Specify an alternate ant command (target) instead of the default
'release'. It can't be given any flags, such as the path to a build.xml.
-@item novcheck=yes
+@item novcheck=yes
Don't check that the version name and code in the resulting apk are
correct by looking at the build output - assume the metadata is
correct. This takes away a useful level of sanity checking, and should
@cindex AntiFeatures
This is optional - if present, it contains a comma-separated list of any of
-the following values, describing an anti-feature the application has.
-Even though such apps won't be displayed unless a settings box is ticked,
-it is a good idea to mention the reasons for the anti-feature(s) in the
+the following values, describing an anti-feature the application has.
+Even though such apps won't be displayed unless a settings box is ticked,
+it is a good idea to mention the reasons for the anti-feature(s) in the
description:
@itemize @bullet
@item
@samp{Tracking} - the application tracks and reports your activity to
-somewhere without your consent. It's commonly used for when developers
-obtain crash logs without the user's consent, or when an app is useless
+somewhere without your consent. It's commonly used for when developers
+obtain crash logs without the user's consent, or when an app is useless
without some kind of authentication.
@item
-@samp{NonFreeNet} - the application relies on computational services that
-are impossible to replace or that the replacement cannot be connected to
+@samp{NonFreeNet} - the application relies on computational services that
+are impossible to replace or that the replacement cannot be connected to
without major changes to the app.
@item
-@samp{NonFreeAdd} - the application promotes non-Free add-ons, such that the
-app is effectively an advert for other non-free software and such software is
+@samp{NonFreeAdd} - the application promotes non-Free add-ons, such that the
+app is effectively an advert for other non-free software and such software is
not clearly labelled as such.
@item
If this field is present, the application does not get put into the public
index. This allows metadata to be retained while an application is temporarily
disabled from being published. The value should be a description of why the
-application is disabled. No apks or source code archives are deleted: to purge
-an apk see the Build Version section or delete manually for developer builds.
-The field is therefore used when an app has outlived it's usefulness, because
+application is disabled. No apks or source code archives are deleted: to purge
+an apk see the Build Version section or delete manually for developer builds.
+The field is therefore used when an app has outlived its usefulness, because
the source tarball is retained.
@node Requires Root
Set this optional field to "Yes" if the application requires root
privileges to be usable. This lets the client filter it out if the
-user so desires. Whether root is required or not, it is good to give
-a paragraph in the description to the conditions on which root may be
+user so desires. Whether root is required or not, it is good to give
+a paragraph in the description to the conditions on which root may be
asked for and the reason for it.
@node Update Check Mode
@itemize
@item
@code{None} - No checking is done because there's no appropriate automated way
-of doing so. Updates should be checked for manually. Use this, for example,
-when deploying betas or patched versions; when builds are done in a directory
-different to where the AndroidManifest.xml is; if the developers use the
-gradle build system and store version info in a separate file; if the
-developers make a new branch for each release and don't make tags; or if you've
+of doing so. Updates should be checked for manually. Use this, for example,
+when deploying betas or patched versions; when builds are done in a directory
+different to where the AndroidManifest.xml is; if the developers use the
+gradle build system and store version info in a separate file; if the
+developers make a new branch for each release and don't make tags; or if you've
changed the package name or version code logic.
@item
@code{Static} - No checking is done - either development has ceased or new versions
-are not desired. This method is also used when there is no other checking method
+are not desired. This method is also used when there is no other checking method
available and the upstream developer keeps us posted on new versions.
@item
-@code{RepoManifest} - At the most recent commit, the AndroidManifest.xml file
+@code{RepoManifest} - At the most recent commit, the AndroidManifest.xml file
is looked for in the directory where it was found in the the most recent build.
-The appropriateness of this method depends on the development process used by
-the application's developers. You should not specify this method unless you're
-sure it's appropriate. For example, some developers bump the version when
+The appropriateness of this method depends on the development process used by
+the application's developers. You should not specify this method unless you're
+sure it's appropriate. For example, some developers bump the version when
commencing development instead of when publishing.
-It will return an error if the AndroidManifest.xml has moved to a different
-directory or if the package name has changed.
-The current version that it gives may not be accurate, since not all
-versions are fit to be published. Therefore, before building, it is often
-necessary to check if the current version has been published somewhere by the
-upstream developers, either by checking for apks that they distribute or for
-tags in the source code repository.
-
-It currently works for every repository type to different extents, except
-the srclib repo type. For git, git-svn and hg repo types, you may use
-"RepoManifest/yourbranch" as UCM so that "yourbranch" would be the branch used
-in place of the default one. The default values are "master" for git,
-"default" for hg and none for git-svn (it stays in the same branch).
-On the other hand, branch support hasn't been implemented yet in bzr and svn,
+It will return an error if the AndroidManifest.xml has moved to a different
+directory or if the package name has changed.
+The current version that it gives may not be accurate, since not all
+versions are fit to be published. Therefore, before building, it is often
+necessary to check if the current version has been published somewhere by the
+upstream developers, either by checking for apks that they distribute or for
+tags in the source code repository.
+
+It currently works for every repository type to different extents, except
+the srclib repo type. For git, git-svn and hg repo types, you may use
+"RepoManifest/yourbranch" as UCM so that "yourbranch" would be the branch used
+in place of the default one. The default values are "master" for git,
+"default" for hg and none for git-svn (it stays in the same branch).
+On the other hand, branch support hasn't been implemented yet in bzr and svn,
but RepoManifest may still be used without it.
@item
@code{RepoTrunk} - For svn and git-svn repositories, especially those who
source repository is checked, looking for the highest version code. The
appropriateness of this method depends on the development process used by the
application's developers. You should not specify this method unless you're sure
-it's appropriate. It shouldn't be used if the developers like to tag betas or
-are known to forget to tag releases. Like RepoManifest, it will not return the
+it's appropriate. It shouldn't be used if the developers like to tag betas or
+are known to forget to tag releases. Like RepoManifest, it will not return the
correct value if the directory containing the AndroidManifest.xml has moved.
-Despite these caveats, it is the often the favourite update check mode.
+Despite these caveats, it is often the favourite update check mode.
It currently only works for git, hg, bzr and git-svn repositories. In the case
of the latter, the repo URL must encode the path to the trunk and tags or else
@cindex Auto Update Mode
-This determines the method using for auto-generating new builds when new
-releases are available - in other words, adding a new Build Version line to the
+This determines the method used for auto-generating new builds when new
+releases are available - in other words, adding a new Build Version line to the
metadata.
This happens in conjunction with the 'Update Check Mode' functionality - i.e.
when an update is detected by that, it is also processed by this.
The name of the version that is current. There may be newer versions of the
application than this (e.g. betas), and there will almost certainly be older
-ones. This should be the one that is recommended for general use.
-In the event that there is no source code for the current version, or that
-non-free libraries are being used, this would ideally be the latest
-version that is still free, though it may still be expedient to
+ones. This should be the one that is recommended for general use.
+In the event that there is no source code for the current version, or that
+non-free libraries are being used, this would ideally be the latest
+version that is still free, though it may still be expedient to
retain the automatic update check — see No Source Since.
This field is normally automatically updated - see Update Check Mode.
@cindex No Source Since
In case we are missing the source code for the Current Version reported by
-Upstream, or that non-free elements have been introduced, this defines the
+Upstream, or that non-free elements have been introduced, this defines the
first version that began to miss source code.
Apps that are missing source code for just one or a few versions, but provide
source code for newer ones are not to be considered here - this field is
@section Setting up a build server
In addition to the basic setup previously described, you will also need
-a Vagrant-compatible Ubuntu Raring base box called 'raring32' (or raring64
+a Vagrant-compatible Debian Testing base box called 'testing32' (or testing64
for a 64-bit VM, if you want it to be much slower, and require more disk
space).
would fail.
Unless you're very trusting. you should create one of these for yourself
-from verified standard Ubuntu installation media. However, you could skip
-over the next few paragraphs (and sacrifice some security) by downloading
-@url{https://f-droid.org/raring32.box} or @url{https://f-droid.org/raring64.box}.
+from verified standard Debian installation media. However, you could skip
+over the next few paragraphs (and sacrifice some security) by downloading
+@url{https://f-droid.org/testing32.box}.
Documentation for creating a base box can be found at
@url{http://docs.vagrantup.com/v1/docs/base_boxes.html}.
-1 in @code{/etc/grub/default} and then run @code{update-grub}.
@end enumerate
-You may also want to edit @code{buildserver/Vagrantfile} - in particular
-there is a path for retrieving the base box if it doesn't exist, and an
-apt proxy definition, both of which may need customising for your
-environment.
With this base box available, you should then create @code{makebs.config.py},
using @code{makebs.config.sample.py} as a reference - look at the settings and
documentation there to decide if any need changing to suit your environment.
+There is a path for retrieving the base box if it doesn't exist, and an apt
+proxy definition, both of which may need customising for your environment.
You can then go to the @code{fdroidserver} directory and run this:
@example
-./makebuildserver.py
+./makebuildserver
@end example
This will take a long time, and use a lot of bandwidth - most of it spent
downloads, but also .tar.gz files for all the relevant additions. If the
provisioning scripts detect these, they will be used in preference to
running the android tools. For example, if you have
-@code{buildserver/addons/cache/platforms/android-15.tar.gz} that will be
-used when installing the android-15 platform, instead of re-downloading it
-using @code{android update sdk --no-ui -t android-15}.
+@code{buildserver/addons/cache/platforms/android-19.tar.gz} that will be
+used when installing the android-19 platform, instead of re-downloading it
+using @code{android update sdk --no-ui -t android-19}.
Once it's complete you'll have a new base box called 'buildserver' which is
what's used for the actual builds. You can then build packages as normal,
# gendocs.sh -- generate a GNU manual in many formats. This script is
# mentioned in maintain.texi. See the help message below for usage details.
-scriptversion=2011-04-08.14
+scriptversion=2013-02-03.15
-# Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
-# Foundation, Inc.
+# Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013
+# Free Software Foundation, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
#
# An up-to-date copy is also maintained in Gnulib (gnu.org/software/gnulib).
+# TODO:
+# - image importation was only implemented for HTML generated by
+# makeinfo. But it should be simple enough to adjust.
+# - images are not imported in the source tarball. All the needed
+# formats (PDF, PNG, etc.) should be included.
+
prog=`basename "$0"`
srcdir=`pwd`
: ${SETLANG="env LANG= LC_MESSAGES= LC_ALL= LANGUAGE="}
: ${MAKEINFO="makeinfo"}
: ${TEXI2DVI="texi2dvi -t @finalout"}
-: ${DVIPS="dvips"}
: ${DOCBOOK2HTML="docbook2html"}
: ${DOCBOOK2PDF="docbook2pdf"}
-: ${DOCBOOK2PS="docbook2ps"}
: ${DOCBOOK2TXT="docbook2txt"}
: ${GENDOCS_TEMPLATE_DIR="."}
+: ${PERL='perl'}
: ${TEXI2HTML="texi2html"}
unset CDPATH
unset use_texi2html
version="gendocs.sh $scriptversion
-Copyright 2010 Free Software Foundation, Inc.
+Copyright 2013 Free Software Foundation, Inc.
There is NO warranty. You may redistribute this software
under the terms of the GNU General Public License.
For more information about these matters, see the files named COPYING."
usage="Usage: $prog [OPTION]... PACKAGE MANUAL-TITLE
-Generate various output formats from PACKAGE.texinfo (or .texi or .txi) source.
-See the GNU Maintainers document for a more extensive discussion:
+Generate output in various formats from PACKAGE.texinfo (or .texi or
+.txi) source. See the GNU Maintainers document for a more extensive
+discussion:
http://www.gnu.org/prep/maintain_toc.html
Options:
- -s SRCFILE read Texinfo from SRCFILE, instead of PACKAGE.{texinfo|texi|txi}
- -o OUTDIR write files into OUTDIR, instead of manual/.
- --email ADR use ADR as contact in generated web pages.
- --docbook convert to DocBook too (xml, txt, html, pdf and ps).
- --html ARG pass indicated ARG to makeinfo or texi2html for HTML targets.
- --texi2html use texi2html to generate HTML targets.
- --help display this help and exit successfully.
- --version display version information and exit successfully.
+ --email ADR use ADR as contact in generated web pages; always give this.
+
+ -s SRCFILE read Texinfo from SRCFILE, instead of PACKAGE.{texinfo|texi|txi}
+ -o OUTDIR write files into OUTDIR, instead of manual/.
+ -I DIR append DIR to the Texinfo search path.
+ --common ARG pass ARG in all invocations.
+ --html ARG pass ARG to makeinfo or texi2html for HTML targets.
+ --info ARG pass ARG to makeinfo for Info, instead of --no-split.
+ --no-ascii skip generating the plain text output.
+ --source ARG include ARG in tar archive of sources.
+ --split HOW make split HTML by node, section, chapter; default node.
+
+ --texi2html use texi2html to make HTML target, with all split versions.
+ --docbook convert through DocBook too (xml, txt, html, pdf).
+
+ --help display this help and exit successfully.
+ --version display version information and exit successfully.
Simple example: $prog --email bug-gnu-emacs@gnu.org emacs \"GNU Emacs Manual\"
wget \"$templateurl\"
$prog --email BUGLIST MANUAL \"GNU MANUAL - One-line description\"
-Output will be in a new subdirectory \"manual\" (by default, use -o OUTDIR
-to override). Move all the new files into your web CVS tree, as
-explained in the Web Pages node of maintain.texi.
+Output will be in a new subdirectory \"manual\" (by default;
+use -o OUTDIR to override). Move all the new files into your web CVS
+tree, as explained in the Web Pages node of maintain.texi.
-Please use the --email ADDRESS option to specify your bug-reporting
-address in the generated HTML pages.
+Please use the --email ADDRESS option so your own bug-reporting
+address will be used in the generated HTML pages.
MANUAL-TITLE is included as part of the HTML <title> of the overall
manual/index.html file. It should include the name of the package being
first copy or symlink all Texinfo sources into a single directory.
(Part of the script's work is to make a tar.gz of the sources.)
-You can set the environment variables MAKEINFO, TEXI2DVI, TEXI2HTML, and
-DVIPS to control the programs that get executed, and
+As implied above, by default monolithic Info files are generated.
+If you want split Info, or other Info options, use --info to override.
+
+You can set the environment variables MAKEINFO, TEXI2DVI, TEXI2HTML,
+and PERL to control the programs that get executed, and
GENDOCS_TEMPLATE_DIR to control where the gendocs_template file is
looked for. With --docbook, the environment variables DOCBOOK2HTML,
-DOCBOOK2PDF, DOCBOOK2PS, and DOCBOOK2TXT are also respected.
+DOCBOOK2PDF, and DOCBOOK2TXT are also consulted.
By default, makeinfo and texi2dvi are run in the default (English)
locale, since that's the language of most Texinfo manuals. If you
Email bug reports or enhancement requests to bug-texinfo@gnu.org.
"
-calcsize()
-{
- size=`ls -ksl $1 | awk '{print $1}'`
- echo $size
-}
-
MANUAL_TITLE=
PACKAGE=
EMAIL=webmasters@gnu.org # please override with --email
+commonarg= # passed to all makeinfo/texi2html invocations.
+dirargs= # passed to all tools (-I dir).
+dirs= # -I's directories.
htmlarg=
+infoarg=--no-split
+generate_ascii=true
outdir=manual
+source_extra=
+split=node
srcfile=
while test $# -gt 0; do
case $1 in
- --email) shift; EMAIL=$1;;
- --help) echo "$usage"; exit 0;;
- --version) echo "$version"; exit 0;;
- -s) shift; srcfile=$1;;
- -o) shift; outdir=$1;;
- --docbook) docbook=yes;;
- --html) shift; htmlarg=$1;;
+ -s) shift; srcfile=$1;;
+ -o) shift; outdir=$1;;
+ -I) shift; dirargs="$dirargs -I '$1'"; dirs="$dirs $1";;
+ --common) shift; commonarg=$1;;
+ --docbook) docbook=yes;;
+ --email) shift; EMAIL=$1;;
+ --html) shift; htmlarg=$1;;
+ --info) shift; infoarg=$1;;
+ --no-ascii) generate_ascii=false;;
+ --source) shift; source_extra=$1;;
+ --split) shift; split=$1;;
--texi2html) use_texi2html=1;;
+
+ --help) echo "$usage"; exit 0;;
+ --version) echo "$version"; exit 0;;
-*)
echo "$0: Unknown option \`$1'." >&2
echo "$0: Try \`--help' for more information." >&2
shift
done
+# makeinfo uses the dirargs, but texi2dvi doesn't.
+commonarg=" $dirargs $commonarg"
+
# For most of the following, the base name is just $PACKAGE
base=$PACKAGE
exit 1
fi
+# Function to return size of $1 in something resembling kilobytes.
+calcsize()
+{
+ size=`ls -ksl $1 | awk '{print $1}'`
+ echo $size
+}
+
+# copy_images OUTDIR HTML-FILE...
+# -------------------------------
+# Copy all the images needed by the HTML-FILEs into OUTDIR. Look
+# for them in the -I directories.
+copy_images()
+{
+ local odir
+ odir=$1
+ shift
+ $PERL -n -e "
+BEGIN {
+ \$me = '$prog';
+ \$odir = '$odir';
+ @dirs = qw($dirs);
+}
+" -e '
+/<img src="(.*?)"/g && ++$need{$1};
+
+END {
+ #print "$me: @{[keys %need]}\n"; # for debugging, show images found.
+ FILE: for my $f (keys %need) {
+ for my $d (@dirs) {
+ if (-f "$d/$f") {
+ use File::Basename;
+ my $dest = dirname ("$odir/$f");
+ #
+ use File::Path;
+ -d $dest || mkpath ($dest)
+ || die "$me: cannot mkdir $dest: $!\n";
+ #
+ use File::Copy;
+ copy ("$d/$f", $dest)
+ || die "$me: cannot copy $d/$f to $dest: $!\n";
+ next FILE;
+ }
+ }
+ die "$me: $ARGV: cannot find image $f\n";
+ }
+}
+' -- "$@" || exit 1
+}
+
case $outdir in
/*) abs_outdir=$outdir;;
*) abs_outdir=$srcdir/$outdir;;
esac
-echo Generating output formats for $srcfile
+echo "Making output for $srcfile"
+echo " in `pwd`"
+mkdir -p "$outdir/"
-cmd="$SETLANG $MAKEINFO -o $PACKAGE.info \"$srcfile\""
-echo "Generating info files... ($cmd)"
+cmd="$SETLANG $MAKEINFO -o $PACKAGE.info $commonarg $infoarg \"$srcfile\""
+echo "Generating info... ($cmd)"
eval "$cmd"
-mkdir -p "$outdir/"
tar czf "$outdir/$PACKAGE.info.tar.gz" $PACKAGE.info*
+ls -l "$outdir/$PACKAGE.info.tar.gz"
info_tgz_size=`calcsize "$outdir/$PACKAGE.info.tar.gz"`
# do not mv the info files, there's no point in having them available
# separately on the web.
-cmd="$SETLANG ${TEXI2DVI} \"$srcfile\""
-echo "Generating dvi ... ($cmd)"
+cmd="$SETLANG $TEXI2DVI $dirargs \"$srcfile\""
+printf "\nGenerating dvi... ($cmd)\n"
eval "$cmd"
-
-# now, before we compress dvi:
-echo Generating postscript...
-${DVIPS} $PACKAGE -o
-gzip -f -9 $PACKAGE.ps
-ps_gz_size=`calcsize $PACKAGE.ps.gz`
-mv $PACKAGE.ps.gz "$outdir/"
-
# compress/finish dvi:
gzip -f -9 $PACKAGE.dvi
dvi_gz_size=`calcsize $PACKAGE.dvi.gz`
mv $PACKAGE.dvi.gz "$outdir/"
+ls -l "$outdir/$PACKAGE.dvi.gz"
-cmd="$SETLANG ${TEXI2DVI} --pdf \"$srcfile\""
-echo "Generating pdf ... ($cmd)"
+cmd="$SETLANG $TEXI2DVI --pdf $dirargs \"$srcfile\""
+printf "\nGenerating pdf... ($cmd)\n"
eval "$cmd"
pdf_size=`calcsize $PACKAGE.pdf`
mv $PACKAGE.pdf "$outdir/"
+ls -l "$outdir/$PACKAGE.pdf"
-cmd="$SETLANG $MAKEINFO -o $PACKAGE.txt --no-split --no-headers \"$srcfile\""
-echo "Generating ASCII... ($cmd)"
-eval "$cmd"
-ascii_size=`calcsize $PACKAGE.txt`
-gzip -f -9 -c $PACKAGE.txt >"$outdir/$PACKAGE.txt.gz"
-ascii_gz_size=`calcsize "$outdir/$PACKAGE.txt.gz"`
-mv $PACKAGE.txt "$outdir/"
+if $generate_ascii; then
+ opt="-o $PACKAGE.txt --no-split --no-headers $commonarg"
+ cmd="$SETLANG $MAKEINFO $opt \"$srcfile\""
+ printf "\nGenerating ascii... ($cmd)\n"
+ eval "$cmd"
+ ascii_size=`calcsize $PACKAGE.txt`
+ gzip -f -9 -c $PACKAGE.txt >"$outdir/$PACKAGE.txt.gz"
+ ascii_gz_size=`calcsize "$outdir/$PACKAGE.txt.gz"`
+ mv $PACKAGE.txt "$outdir/"
+ ls -l "$outdir/$PACKAGE.txt" "$outdir/$PACKAGE.txt.gz"
+fi
html_split()
{
- opt="--split=$1 $htmlarg --node-files"
+ opt="--split=$1 --node-files $commonarg $htmlarg"
cmd="$SETLANG $TEXI2HTML --output $PACKAGE.html $opt \"$srcfile\""
- echo "Generating html by $1... ($cmd)"
+ printf "\nGenerating html by $1... ($cmd)\n"
eval "$cmd"
split_html_dir=$PACKAGE.html
(
}
if test -z "$use_texi2html"; then
- opt="--no-split --html -o $PACKAGE.html $htmlarg"
+ opt="--no-split --html -o $PACKAGE.html $commonarg $htmlarg"
cmd="$SETLANG $MAKEINFO $opt \"$srcfile\""
- echo "Generating monolithic html... ($cmd)"
+ printf "\nGenerating monolithic html... ($cmd)\n"
rm -rf $PACKAGE.html # in case a directory is left over
eval "$cmd"
html_mono_size=`calcsize $PACKAGE.html`
gzip -f -9 -c $PACKAGE.html >"$outdir/$PACKAGE.html.gz"
html_mono_gz_size=`calcsize "$outdir/$PACKAGE.html.gz"`
+ copy_images "$outdir/" $PACKAGE.html
mv $PACKAGE.html "$outdir/"
+ ls -l "$outdir/$PACKAGE.html" "$outdir/$PACKAGE.html.gz"
- cmd="$SETLANG $MAKEINFO --html -o $PACKAGE.html $htmlarg \"$srcfile\""
- echo "Generating html by node... ($cmd)"
+ opt="--html -o $PACKAGE.html --split=$split $commonarg $htmlarg"
+ cmd="$SETLANG $MAKEINFO $opt \"$srcfile\""
+ printf "\nGenerating html by $split... ($cmd)\n"
eval "$cmd"
split_html_dir=$PACKAGE.html
+ copy_images $split_html_dir/ $split_html_dir/*.html
(
- cd ${split_html_dir} || exit 1
- tar -czf "$abs_outdir/${PACKAGE}.html_node.tar.gz" -- *.html
+ cd $split_html_dir || exit 1
+ tar -czf "$abs_outdir/$PACKAGE.html_$split.tar.gz" -- *
)
- html_node_tgz_size=`calcsize "$outdir/${PACKAGE}.html_node.tar.gz"`
- rm -f "$outdir"/html_node/*.html
- mkdir -p "$outdir/html_node/"
- mv ${split_html_dir}/*.html "$outdir/html_node/"
- rmdir ${split_html_dir}
-else
- cmd="$SETLANG $TEXI2HTML --output $PACKAGE.html $htmlarg \"$srcfile\""
- echo "Generating monolithic html... ($cmd)"
+ eval \
+ html_${split}_tgz_size=`calcsize "$outdir/$PACKAGE.html_$split.tar.gz"`
+ rm -rf "$outdir/html_$split/"
+ mv $split_html_dir "$outdir/html_$split/"
+ du -s "$outdir/html_$split/"
+ ls -l "$outdir/$PACKAGE.html_$split.tar.gz"
+
+else # use texi2html:
+ opt="--output $PACKAGE.html $commonarg $htmlarg"
+ cmd="$SETLANG $TEXI2HTML $opt \"$srcfile\""
+ printf "\nGenerating monolithic html with texi2html... ($cmd)\n"
rm -rf $PACKAGE.html # in case a directory is left over
eval "$cmd"
html_mono_size=`calcsize $PACKAGE.html`
html_split section
fi
-echo Making .tar.gz for sources...
+printf "\nMaking .tar.gz for sources...\n"
d=`dirname $srcfile`
(
cd "$d"
- srcfiles=`ls *.texinfo *.texi *.txi *.eps 2>/dev/null` || true
- tar cvzfh "$abs_outdir/$PACKAGE.texi.tar.gz" $srcfiles
+ srcfiles=`ls -d *.texinfo *.texi *.txi *.eps $source_extra 2>/dev/null` || true
+ tar czfh "$abs_outdir/$PACKAGE.texi.tar.gz" $srcfiles
+ ls -l "$abs_outdir/$PACKAGE.texi.tar.gz"
)
texi_tgz_size=`calcsize "$outdir/$PACKAGE.texi.tar.gz"`
if test -n "$docbook"; then
- cmd="$SETLANG $MAKEINFO -o - --docbook \"$srcfile\" > ${srcdir}/$PACKAGE-db.xml"
- echo "Generating docbook XML... ($cmd)"
+ opt="-o - --docbook $commonarg"
+ cmd="$SETLANG $MAKEINFO $opt \"$srcfile\" >${srcdir}/$PACKAGE-db.xml"
+ printf "\nGenerating docbook XML... ($cmd)\n"
eval "$cmd"
docbook_xml_size=`calcsize $PACKAGE-db.xml`
gzip -f -9 -c $PACKAGE-db.xml >"$outdir/$PACKAGE-db.xml.gz"
mv $PACKAGE-db.xml "$outdir/"
split_html_db_dir=html_node_db
- cmd="${DOCBOOK2HTML} -o $split_html_db_dir \"${outdir}/$PACKAGE-db.xml\""
- echo "Generating docbook HTML... ($cmd)"
+ opt="$commonarg -o $split_html_db_dir"
+ cmd="$DOCBOOK2HTML $opt \"${outdir}/$PACKAGE-db.xml\""
+ printf "\nGenerating docbook HTML... ($cmd)\n"
eval "$cmd"
(
cd ${split_html_db_dir} || exit 1
mv ${split_html_db_dir}/*.html "$outdir/html_node_db/"
rmdir ${split_html_db_dir}
- cmd="${DOCBOOK2TXT} \"${outdir}/$PACKAGE-db.xml\""
- echo "Generating docbook ASCII... ($cmd)"
+ cmd="$DOCBOOK2TXT \"${outdir}/$PACKAGE-db.xml\""
+ printf "\nGenerating docbook ASCII... ($cmd)\n"
eval "$cmd"
docbook_ascii_size=`calcsize $PACKAGE-db.txt`
mv $PACKAGE-db.txt "$outdir/"
- cmd="${DOCBOOK2PS} \"${outdir}/$PACKAGE-db.xml\""
- echo "Generating docbook PS... ($cmd)"
- eval "$cmd"
- gzip -f -9 -c $PACKAGE-db.ps >"$outdir/$PACKAGE-db.ps.gz"
- docbook_ps_gz_size=`calcsize "$outdir/$PACKAGE-db.ps.gz"`
- mv $PACKAGE-db.ps "$outdir/"
-
- cmd="${DOCBOOK2PDF} \"${outdir}/$PACKAGE-db.xml\""
- echo "Generating docbook PDF... ($cmd)"
+ cmd="$DOCBOOK2PDF \"${outdir}/$PACKAGE-db.xml\""
+ printf "\nGenerating docbook PDF... ($cmd)\n"
eval "$cmd"
docbook_pdf_size=`calcsize $PACKAGE-db.pdf`
mv $PACKAGE-db.pdf "$outdir/"
fi
-echo "Writing index file..."
+printf "\nMaking index file...\n"
if test -z "$use_texi2html"; then
- CONDS="/%%IF *HTML_SECTION%%/,/%%ENDIF *HTML_SECTION%%/d;\
- /%%IF *HTML_CHAPTER%%/,/%%ENDIF *HTML_CHAPTER%%/d"
+ CONDS="/%%IF *HTML_SECTION%%/,/%%ENDIF *HTML_SECTION%%/d;\
+ /%%IF *HTML_CHAPTER%%/,/%%ENDIF *HTML_CHAPTER%%/d"
else
- CONDS="/%%ENDIF.*%%/d;/%%IF *HTML_SECTION%%/d;/%%IF *HTML_CHAPTER%%/d"
+ # should take account of --split here.
+ CONDS="/%%ENDIF.*%%/d;/%%IF *HTML_SECTION%%/d;/%%IF *HTML_CHAPTER%%/d"
fi
+
curdate=`$SETLANG date '+%B %d, %Y'`
sed \
-e "s!%%TITLE%%!$MANUAL_TITLE!g" \
-e "s!%%INFO_TGZ_SIZE%%!$info_tgz_size!g" \
-e "s!%%DVI_GZ_SIZE%%!$dvi_gz_size!g" \
-e "s!%%PDF_SIZE%%!$pdf_size!g" \
- -e "s!%%PS_GZ_SIZE%%!$ps_gz_size!g" \
-e "s!%%ASCII_SIZE%%!$ascii_size!g" \
-e "s!%%ASCII_GZ_SIZE%%!$ascii_gz_size!g" \
-e "s!%%TEXI_TGZ_SIZE%%!$texi_tgz_size!g" \
-e "s!%%DOCBOOK_HTML_NODE_TGZ_SIZE%%!$html_node_db_tgz_size!g" \
-e "s!%%DOCBOOK_ASCII_SIZE%%!$docbook_ascii_size!g" \
- -e "s!%%DOCBOOK_PS_GZ_SIZE%%!$docbook_ps_gz_size!g" \
-e "s!%%DOCBOOK_PDF_SIZE%%!$docbook_pdf_size!g" \
-e "s!%%DOCBOOK_XML_SIZE%%!$docbook_xml_size!g" \
-e "s!%%DOCBOOK_XML_GZ_SIZE%%!$docbook_xml_gz_size!g" \
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
- "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
+<!--#include virtual="/server/header.html" -->
+<title>%%TITLE%% - GNU Project - Free Software Foundation (FSF)</title>
+<!--#include virtual="/server/banner.html" -->
+<h2>%%TITLE%%</h2>
-<head>
-<title>%%TITLE%%</title>
-<meta http-equiv="content-type" content='text/html; charset=utf-8' />
-<link rel="stylesheet" type="text/css" href="/gnu.css" />
-<link rev="made" href="admin-0droid.org" />
-</head>
-
-<body>
-
-<h3>%%TITLE%%</h3>
+<address>Free Software Foundation</address>
+<address>last updated %%DATE%%</address>
<p>This manual (%%PACKAGE%%) is available in the following formats:</p>
(%%ASCII_GZ_SIZE%%K bytes gzipped)</a>.</li>
<li><a href="%%PACKAGE%%.dvi.gz">TeX dvi file
(%%DVI_GZ_SIZE%%K bytes gzipped)</a>.</li>
-<li><a href="%%PACKAGE%%.ps.gz">PostScript file
- (%%PS_GZ_SIZE%%K bytes gzipped)</a>.</li>
<li><a href="%%PACKAGE%%.pdf">PDF file
(%%PDF_SIZE%%K bytes)</a>.</li>
<li><a href="%%PACKAGE%%.texi.tar.gz">Texinfo source
(%%TEXI_TGZ_SIZE%%K bytes gzipped tar file).</a></li>
</ul>
+<p>You can <a href="http://shop.fsf.org/">buy printed copies of
+some manuals</a> (among other items) from the Free Software Foundation;
+this helps support FSF activities.</p>
+
<p>(This page generated by the <a href="%%SCRIPTURL%%">%%SCRIPTNAME%%
script</a>.)</p>
+<!-- If needed, change the copyright block at the bottom. In general,
+ all pages on the GNU web server should have the section about
+ verbatim copying. Please do NOT remove this without talking
+ with the webmasters first.
+ Please make sure the copyright date is consistent with the document
+ and that it is like this: "2001, 2002", not this: "2001-2002". -->
+</div><!-- for id="content", starts in the include above -->
+<!--#include virtual="/server/footer.html" -->
+<div id="footer">
+
+<p>Please send general FSF & GNU inquiries to
+<a href="mailto:gnu@gnu.org"><gnu@gnu.org></a>.
+There are also <a href="/contact/">other ways to contact</a>
+the FSF.<br />
+Please send broken links and other corrections or suggestions to
+<a href="mailto:%%EMAIL%%"><%%EMAIL%%></a>.</p>
+
+<p>Copyright © 2013 Free Software Foundation, Inc.</p>
+
+<p>Verbatim copying and distribution of this entire article are
+permitted worldwide, without royalty, in any medium, provided this
+notice, and the copyright notice, are preserved.</p>
+
+</div>
+</div>
</body>
</html>
--- /dev/null
+### 11 (January 2014)
+
+* Support per-density icon folders (/icons-\*)
+
+### 10 (January 2014)
+
+* First version
+
+### 0 (?)
+
+* No version yet declared
#!/bin/bash
-
+#
+# fd-commit - part of the FDroid server tools
# Commits updates to apps, allowing you to edit the commit messages
+#
+# Copyright (C) 2013 Daniel Martí <mvdan@mvdan.cc>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
commands=()
while read line; do
if [[ "$line" == *M*metadata/*.txt ]]; then
file=${line##* }
-
+
id=${file##*/}
id=${id%.txt*}
if [ $# -gt 0 ]; then
$found || continue
fi
- if [ -d metadata/$id ]; then
- extra=metadata/$id
- else
- extra=
- fi
+ [ -d metadata/$id ] && extra=metadata/$id || extra=
name= autoname=
while read l; do
fullname="$id"
fi
- newbuild=0
+ newbuild=false
while read l; do
if [[ "$l" == "+Build:"* ]]; then
- newbuild=1
+ newbuild=true
build=${l#*:}
version=${build%%,*}
build=${build#*,}
fi
done < <(git diff HEAD -- "$file")
- if [ $newbuild -eq 0 ]; then
- message="$fullname:"
- else
+ if $newbuild ; then
message="Update $fullname to $version ($vercode)"
+ else
+ message="$fullname:"
fi
- commands+=("git commit -m '$message' -e -v -- $file $extra")
+ message=${message//\"/\\\"}
+ commands+=("git add -- $file $extra && git commit -m \"$message\" -e -v")
fi
done < <(git status --porcelain)
+git reset >/dev/null
for cmd in "${commands[@]}"; do
eval "$cmd"
+ git reset >/dev/null
done
-
# -*- coding: utf-8 -*-
#
# fdroid.py - part of the FDroid server tools
-# Copyright (C) 2010-12, Ciaran Gultnieks, ciaran@ciarang.com
+# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
+# Copyright (C) 2013 Daniel Martí <mvdan@mvdan.cc>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
commands = [
"build",
"init",
+ "install",
"update",
"publish",
"verify",
"checkupdates",
"import",
"rewritemeta",
+ "lint",
"scanner",
"stats",
"server"]
import time
import json
from ConfigParser import ConfigParser
-from optparse import OptionParser
+from optparse import OptionParser, OptionError
import common, metadata
from common import BuildException, VCSException, FDroidPopen
p = subprocess.Popen(['VBoxManage', 'snapshot', get_builder_vm_id(), 'list', '--details'],
cwd='builder', stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = p.communicate()[0]
- if output.find('fdroidclean') != -1:
+ if 'fdroidclean' in output:
if options.verbose:
print "...snapshot exists - resetting build server to clean state"
retcode, output = vagrant(['status'], cwd='builder')
- if output.find('running') != -1:
+ if 'running' in output:
if options.verbose:
print "...suspending"
vagrant(['suspend'], cwd='builder')
+ print "...waiting a sec..."
+ time.sleep(10)
p = subprocess.Popen(['VBoxManage', 'snapshot', get_builder_vm_id(), 'restore', 'fdroidclean'],
cwd='builder', stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = p.communicate()[0]
retcode, output = vagrant(['up'], cwd='builder')
if retcode != 0:
raise BuildException("Failed to start build server")
+ print "...waiting a sec..."
+ time.sleep(10)
vm_ok = True
else:
print "...failed to reset to snapshot"
if p.returncode != 0:
print output
raise BuildException("Failed to take snapshot")
+ print "...waiting a sec..."
+ time.sleep(10)
print "Restarting new build server"
retcode, _ = vagrant(['up'], cwd='builder')
if retcode != 0:
raise BuildException("Failed to start build server")
+ print "...waiting a sec..."
+ time.sleep(10)
# Make sure it worked...
p = subprocess.Popen(['VBoxManage', 'snapshot', get_builder_vm_id(), 'list', '--details'],
cwd='builder', stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = p.communicate()[0]
- if output.find('fdroidclean') == -1:
+ if 'fdroidclean' not in output:
raise BuildException("Failed to take snapshot.")
try:
cmdline += ' --force --test'
if options.verbose:
cmdline += ' --verbose'
- cmdline += ' -p ' + app['id'] + ' --vercode ' + thisbuild['vercode']
+ cmdline += " %s:%s" % (app['id'], thisbuild['vercode'])
chan.exec_command('bash -c ". ~/.bsenv && ' + cmdline + '"')
output = ''
error = ''
output += chan.recv(1024)
while chan.recv_stderr_ready():
error += chan.recv_stderr(1024)
+ time.sleep(0.1)
print "...getting exit status"
returncode = chan.recv_exit_status()
while True:
print "Suspending build server"
subprocess.call(['vagrant', 'suspend'], cwd='builder')
-def adapt_gradle(path):
- if options.verbose:
- print "Adapting build.gradle at %s" % path
-
- subprocess.call(['sed', '-i',
- 's@buildToolsVersion[ ]*["\\\'][0-9\.]*["\\\']@buildToolsVersion "'+ config['build_tools'] +'"@g', path])
+def adapt_gradle(build_dir):
+ for root, dirs, files in os.walk(build_dir):
+ if 'build.gradle' in files:
+ path = os.path.join(root, 'build.gradle')
+ if options.verbose:
+ print "Adapting build.gradle at %s" % path
- subprocess.call(['sed', '-i',
- 's@com.android.tools.build:gradle:[0-9\.\+]*@com.android.tools.build:gradle:'+ config['gradle_plugin'] +'@g', path])
+ subprocess.call(['sed', '-i',
+ 's@buildToolsVersion[ ]*["\\\'][0-9\.]*["\\\']@buildToolsVersion "'
+ + config['build_tools'] + '"@g', path])
-def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_dir, tmp_dir, install, force, onserver):
+def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver):
"""Do a build locally."""
# Prepare the source code...
# We need to clean via the build tool in case the binary dirs are
# different from the default ones
p = None
- if thisbuild.get('maven', 'no') != 'no':
+ if thisbuild['type'] == 'maven':
print "Cleaning Maven project..."
cmd = [config['mvn3'], 'clean', '-Dandroid.sdk.path=' + config['sdk_path']]
maven_dir = root_dir
p = FDroidPopen(cmd, cwd=maven_dir)
- elif thisbuild.get('gradle', 'no') != 'no':
+
+ elif thisbuild['type'] == 'gradle':
print "Cleaning Gradle project..."
cmd = [config['gradle'], 'clean']
gradle_dir = root_dir
p = FDroidPopen(cmd, cwd=gradle_dir)
- elif thisbuild.get('update', '.') != 'no' and thisbuild.get('kivy', 'no') == 'no':
+
+ elif thisbuild['type'] == 'kivy':
+ pass
+
+ elif thisbuild['type'] == 'ant':
print "Cleaning Ant project..."
- cmd = ['ant', 'clean']
- p = FDroidPopen(cmd, cwd=root_dir)
+ p = FDroidPopen(['ant', 'clean'], cwd=root_dir)
if p is not None and p.returncode != 0:
raise BuildException("Error cleaning %s:%s" %
(app['id'], thisbuild['version']), p.stdout, p.stderr)
- # Also clean jni
- print "Cleaning jni dirs..."
- for baddir in [
- 'libs/armeabi-v7a', 'libs/armeabi',
- 'libs/mips', 'libs/x86', 'obj']:
- badpath = os.path.join(build_dir, baddir)
- if os.path.exists(badpath):
- print "Removing '%s'" % badpath
- shutil.rmtree(badpath)
-
# Scan before building...
print "Scanning source for common problems..."
buildprobs = common.scan_source(build_dir, root_dir, thisbuild)
if len(buildprobs) > 0:
print 'Scanner found ' + str(len(buildprobs)) + ' problems:'
for problem in buildprobs:
- print '...' + problem
+ print ' %s' % problem
if not force:
raise BuildException("Can't build due to " +
str(len(buildprobs)) + " scanned problems")
tarname = common.getsrcname(app,thisbuild)
tarball = tarfile.open(os.path.join(tmp_dir, tarname), "w:gz")
def tarexc(f):
- for vcs_dir in ['.svn', '.git', '.hg', '.bzr']:
- if f.endswith(vcs_dir):
- return True
- return False
+ return any(f.endswith(s) for s in ['.svn', '.git', '.hg', '.bzr'])
tarball.add(build_dir, tarname, exclude=tarexc)
tarball.close()
print "Running 'build' commands in %s" % root_dir
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
-
+
if p.returncode != 0:
raise BuildException("Error running build command for %s:%s" %
(app['id'], thisbuild['version']), p.stdout, p.stderr)
p = None
# Build the release...
- if thisbuild.get('maven', 'no') != 'no':
+ if thisbuild['type'] == 'maven':
print "Building Maven project..."
if '@' in thisbuild['maven']:
else:
maven_dir = root_dir
- mvncmd = [config['mvn3'], '-Dandroid.sdk.path=' + config['sdk_path']]
- if install:
- mvncmd += ['-Dandroid.sign.debug=true', 'package', 'android:deploy']
- else:
- mvncmd += ['-Dandroid.sign.debug=false', '-Dandroid.release=true', 'package']
+ mvncmd = [config['mvn3'], '-Dandroid.sdk.path=' + config['sdk_path'],
+ '-Dandroid.sign.debug=false', '-Dandroid.release=true', 'package']
if 'target' in thisbuild:
target = thisbuild["target"].split('-')[1]
subprocess.call(['sed', '-i',
bindir = os.path.join(root_dir, 'target')
- elif thisbuild.get('kivy', 'no') != 'no':
+ elif thisbuild['type'] == 'kivy':
print "Building Kivy project..."
spec = os.path.join(root_dir, 'buildozer.spec')
raise BuildException("Expected to find buildozer-compatible spec at {0}"
.format(spec))
- defaults = {'orientation': 'landscape', 'icon': '',
+ defaults = {'orientation': 'landscape', 'icon': '',
'permissions': '', 'android.api': "18"}
bconfig = ConfigParser(defaults, allow_no_value=True)
bconfig.read(spec)
cmd += ' ANDROIDAPI=' + str(bconfig.get('app', 'android.api'))
cmd += ' VIRTUALENV=virtualenv'
cmd += ' ./distribute.sh'
- cmd += ' -m ' + "'" + ' '.join(modules) + "'"
+ cmd += ' -m ' + "'" + ' '.join(modules) + "'"
cmd += ' -d fdroid'
if subprocess.call(cmd, cwd='python-for-android', shell=True) != 0:
raise BuildException("Distribute build failed")
cmd.append('release')
p = FDroidPopen(cmd, cwd=distdir)
- elif thisbuild.get('gradle', 'no') != 'no':
+ elif thisbuild['type'] == 'gradle':
print "Building Gradle project..."
if '@' in thisbuild['gradle']:
flavour = thisbuild['gradle'].split('@')[0]
's@compileSdkVersion[ ]*[0-9]*@compileSdkVersion '+level+'@g',
'build.gradle'], cwd=gradle_dir)
- for root, dirs, files in os.walk(build_dir):
- for f in files:
- if f == 'build.gradle':
- adapt_gradle(os.path.join(root, f))
- break
+ adapt_gradle(gradle_dir)
+
+ for name, number, libpath in srclibpaths:
+ adapt_gradle(libpath)
if flavour in ['main', 'yes', '']:
flavour = ''
-
+
commands = [config['gradle']]
if 'preassemble' in thisbuild:
for task in thisbuild['preassemble'].split():
commands.append(task)
- if install:
- commands += ['assemble'+flavour+'Debug', 'install'+flavour+'Debug']
- else:
- commands += ['assemble'+flavour+'Release']
+ commands += ['assemble'+flavour+'Release']
p = FDroidPopen(commands, cwd=gradle_dir)
else:
print "Building Ant project..."
cmd = ['ant']
- if install:
- cmd += ['debug','install']
- elif 'antcommand' in thisbuild:
+ if 'antcommand' in thisbuild:
cmd += [thisbuild['antcommand']]
else:
cmd += ['release']
raise BuildException("Build failed for %s:%s" % (app['id'], thisbuild['version']), p.stdout, p.stderr)
print "Successfully built version " + thisbuild['version'] + ' of ' + app['id']
- if install:
- return
-
# Find the apk name in the output...
if 'bindir' in thisbuild:
bindir = os.path.join(build_dir, thisbuild['bindir'])
- if thisbuild.get('maven', 'no') != 'no':
+ if thisbuild['type'] == 'maven':
stdout_apk = '\n'.join([
line for line in p.stdout.splitlines() if any(a in line for a in ('.apk','.ap_'))])
m = re.match(r".*^\[INFO\] .*apkbuilder.*/([^/]*)\.apk",
raise BuildException('Failed to find output')
src = m.group(1)
src = os.path.join(bindir, src) + '.apk'
- elif thisbuild.get('kivy', 'no') != 'no':
+ elif thisbuild['type'] == 'kivy':
src = 'python-for-android/dist/default/bin/{0}-{1}-release.apk'.format(
bconfig.get('app', 'title'), bconfig.get('app', 'version'))
- elif thisbuild.get('gradle', 'no') != 'no':
+ elif thisbuild['type'] == 'gradle':
dd = build_dir
if 'subdir' in thisbuild:
dd = os.path.join(dd, thisbuild['subdir'])
for line in output.splitlines():
if line.startswith("package:"):
pat = re.compile(".*name='([a-zA-Z0-9._]*)'.*")
- foundid = re.match(pat, line).group(1)
+ m = pat.match(line)
+ if m:
+ foundid = m.group(1)
pat = re.compile(".*versionCode='([0-9]*)'.*")
- vercode = re.match(pat, line).group(1)
+ m = pat.match(line)
+ if m:
+ vercode = m.group(1)
pat = re.compile(".*versionName='([^']*)'.*")
- version = re.match(pat, line).group(1)
+ m = pat.match(line)
+ if m:
+ version = m.group(1)
+
if thisbuild['novcheck']:
vercode = thisbuild['vercode']
version = thisbuild['version']
def trybuild(app, thisbuild, build_dir, output_dir, also_check_dir, srclib_dir, extlib_dir,
- tmp_dir, repo_dir, vcs, test, server, install, force, onserver):
+ tmp_dir, repo_dir, vcs, test, server, force, onserver):
"""
Build a particular version of an application, if it needs building.
build_server(app, thisbuild, vcs, build_dir, output_dir, force)
else:
- build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_dir, tmp_dir, install, force, onserver)
+ build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_dir, tmp_dir, force, onserver)
return True
def parse_commandline():
"""Parse the command line. Returns options, args."""
- parser = OptionParser()
+ parser = OptionParser(usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
parser.add_option("-v", "--verbose", action="store_true", default=False,
help="Spew out even more information than normal")
- parser.add_option("-p", "--package", default=None,
- help="Build only the specified package")
- parser.add_option("-c", "--vercode", default=None,
- help="Build only the specified version code")
parser.add_option("-l", "--latest", action="store_true", default=False,
- help="Build only the latest version code available")
+ help="Build only the latest version of each package")
parser.add_option("-s", "--stop", action="store_true", default=False,
help="Make the build stop on exceptions")
parser.add_option("-t", "--test", action="store_true", default=False,
help="Specify that we're running on the build server")
parser.add_option("-f", "--force", action="store_true", default=False,
help="Force build of disabled apps, and carries on regardless of scan problems. Only allowed in test mode.")
- parser.add_option("--install", action="store_true", default=False,
- help="Use 'ant debug install' to build and install a " +
- "debug version on your device or emulator. " +
- "Implies --force and --test")
- parser.add_option("--all", action="store_true", default=False,
- help="Use with --install, when not using --package"
- " to confirm you really want to build and install everything.")
+ parser.add_option("-a", "--all", action="store_true", default=False,
+ help="Build all applications available")
parser.add_option("-w", "--wiki", default=False, action="store_true",
help="Update the wiki")
options, args = parser.parse_args()
if options.onserver:
options.stop = True
- # The --install option implies --test and --force...
- if options.install:
- if options.server:
- print "Can't install when building on a build server."
- sys.exit(1)
- if not options.package and not options.all:
- print "This would build and install everything in the repo to the device."
- print "You probably want to use --package and maybe also --vercode."
- print "If you really want to install everything, use --all."
- sys.exit(1)
- options.force = True
- options.test = True
-
if options.force and not options.test:
- print "Force is only allowed in test mode"
- sys.exit(1)
+ raise OptionError("Force is only allowed in test mode", "force")
return options, args
global options, config
options, args = parse_commandline()
+ if not args and not options.all:
+ raise OptionError("If you really want to build all the apps, use --all", "all")
+
config = common.read_config(options)
if config['build_server_always']:
options.server = True
if options.resetserver and not options.server:
- print "Using --resetserver without --server makes no sense"
- sys.exit(1)
-
- # Get all apps...
- apps = metadata.read_metadata(xref=not options.onserver)
+ raise OptionError("Using --resetserver without --server makes no sense", "resetserver")
log_dir = 'logs'
if not os.path.isdir(log_dir):
srclib_dir = os.path.join(build_dir, 'srclib')
extlib_dir = os.path.join(build_dir, 'extlib')
- # Filter apps and build versions according to command-line options, etc...
- if options.package:
- apps = [app for app in apps if app['id'] == options.package]
- if len(apps) == 0:
- print "No such package"
- sys.exit(1)
+ # Get all apps...
+ allapps = metadata.read_metadata(xref=not options.onserver)
+
+ apps = common.read_app_args(args, allapps, True)
apps = [app for app in apps if (options.force or not app['Disabled']) and
- app['builds'] and len(app['Repo Type']) > 0 and len(app['builds']) > 0]
+ len(app['Repo Type']) > 0 and len(app['builds']) > 0]
+
if len(apps) == 0:
- print "Nothing to do - all apps are disabled or have no builds defined."
- sys.exit(1)
- if options.vercode:
- for app in apps:
- app['builds'] = [b for b in app['builds']
- if str(b['vercode']) == options.vercode]
- elif options.latest:
+ raise Exception("No apps to process.")
+
+ if options.latest:
for app in apps:
- m = max([i['vercode'] for i in app['builds']], key=int)
- app['builds'] = [b for b in app['builds'] if b['vercode'] == m]
+ app['builds'] = app['builds'][-1:]
if options.wiki:
import mwclient
print "Checking " + thisbuild['version']
if trybuild(app, thisbuild, build_dir, output_dir, also_check_dir,
srclib_dir, extlib_dir, tmp_dir, repo_dir, vcs, options.test,
- options.server, options.install, options.force, options.onserver):
+ options.server, options.force, options.onserver):
build_succeeded.append(app)
wikilog = "Build succeeded"
except BuildException as be:
import common, metadata
from common import BuildException
from common import VCSException
+from metadata import MetaDataException
# Check for a new version by looking at a document retrieved via HTTP.
hcode = "0"
for tag in vcs.gettags():
+ if options.verbose:
+ print "Check tag: '{0}'".format(tag)
vcs.gotorevision(tag)
# Only process tags where the manifest exists...
if not vercode:
return (None,"Couldn't find latest version code")
- return (version, str(int(vercode)))
+ vercode = str(int(vercode))
+
+ print "Manifest exists. Found version %s (%s)" % (version, vercode)
+
+ return (version, vercode)
except BuildException as be:
msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be)
global config, options
# Parse command line...
- parser = OptionParser()
+ parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
parser.add_option("-v", "--verbose", action="store_true", default=False,
help="Spew out even more information than normal")
- parser.add_option("-p", "--package", default=None,
- help="Check only the specified package")
parser.add_option("--auto", action="store_true", default=False,
help="Process auto-updates")
parser.add_option("--autoonly", action="store_true", default=False,
config = common.read_config(options)
# Get all apps...
- apps = metadata.read_metadata(options.verbose)
+ allapps = metadata.read_metadata(options.verbose)
- # Filter apps according to command-line options
- if options.package:
- apps = [app for app in apps if app['id'] == options.package]
- if len(apps) == 0:
- print "No such package"
- sys.exit(1)
+ apps = common.read_app_args(args, allapps, False)
if options.gplay:
for app in apps:
if reason == '404':
print "%s is not in the Play Store" % common.getappname(app)
else:
- print "%s encountered a problem: %s" % common.getappname(app)
+ print "%s encountered a problem: %s" % (common.getappname(app), reason)
if version is not None:
stored = app['Current Version']
if LooseVersion(stored) < LooseVersion(version):
for app in apps:
-
if options.autoonly and app['Auto Update Mode'] == 'None':
if options.verbose:
print "Nothing to do for %s..." % app['id']
elif mode.startswith('Version '):
pattern = mode[8:]
if pattern.startswith('+'):
- o = pattern.find(' ')
- suffix = pattern[1:o]
- pattern = pattern[o + 1:]
+ try:
+ suffix, pattern = pattern.split(' ', 1)
+ except ValueError:
+ raise MetaDataException("Invalid AUM: " + mode)
else:
suffix = ''
gotcur = False
'archive_older': 0,
'max_icon_size': 72,
'stats_to_carbon': False,
- 'repo_maxage': 0
+ 'repo_maxage': 0,
+ 'char_limits': {
+ 'Summary' : 50,
+ 'Description' : 1500
+ }
+
}
config = {}
return config
+# Given the arguments in the form of multiple appid:[vc] strings, this returns
+# a dictionary with the set of vercodes specified for each package.
+def read_pkg_args(args, allow_vercodes=False):
+
+ vercodes = {}
+ if not args:
+ return vercodes
+
+ for p in args:
+ if allow_vercodes and ':' in p:
+ package, vercode = p.split(':')
+ else:
+ package, vercode = p, None
+ if package not in vercodes:
+ vercodes[package] = [vercode] if vercode else []
+ continue
+ elif vercode and vercode not in vercodes[package]:
+ vercodes[package] += [vercode] if vercode else []
+
+ return vercodes
+
+# On top of what read_pkg_args does, this returns the whole app metadata, but
+# limiting the builds list to the builds matching the vercodes specified.
+def read_app_args(args, allapps, allow_vercodes=False):
+
+ vercodes = read_pkg_args(args, allow_vercodes)
+
+ if not vercodes:
+ return allapps
+
+ apps = [app for app in allapps if app['id'] in vercodes]
+
+ if not apps:
+ raise Exception("No packages specified")
+ if len(apps) != len(vercodes):
+ allids = [app["id"] for app in allapps]
+ for p in vercodes:
+ if p not in allids:
+ print "No such package: %s" % p
+ raise Exception("Found invalid app ids in arguments")
+
+ error = False
+ for app in apps:
+ vc = vercodes[app['id']]
+ if not vc:
+ continue
+ app['builds'] = [b for b in app['builds'] if b['vercode'] in vc]
+ if len(app['builds']) != len(vercodes[app['id']]):
+ error = True
+ allvcs = [b['vercode'] for b in app['builds']]
+ for v in vercodes[app['id']]:
+ if v not in allvcs:
+ print "No such vercode %s for app %s" % (v, app['id'])
+
+ if error:
+ raise Exception("Found invalid vercodes for some apps")
+
+ return apps
+
+def has_extension(filename, extension):
+ name, ext = os.path.splitext(filename)
+ ext = ext.lower()[1:]
+ return ext == extension
+
+apk_regex = None
+
+def apknameinfo(filename):
+ global apk_regex
+ filename = os.path.basename(filename)
+ if apk_regex is None:
+ apk_regex = re.compile(r"^(.+)_([0-9]+)\.apk$")
+ m = apk_regex.match(filename)
+ try:
+ result = (m.group(1), m.group(2))
+ except AttributeError:
+ raise Exception("Invalid apk name: %s" % filename)
+ return result
+
def getapkname(app, build):
return "%s_%s.apk" % (app['id'], build['vercode'])
return "%s_%s_src.tar.gz" % (app['id'], build['vercode'])
def getappname(app):
- if app['Name']:
- return '%s (%s)' % (app['Name'], app['id'])
- if app['Auto Name']:
- return '%s (%s)' % (app['Auto Name'], app['id'])
- return app['id']
+ if app['Name']:
+ return '%s (%s)' % (app['Name'], app['id'])
+ if app['Auto Name']:
+ return '%s (%s)' % (app['Auto Name'], app['id'])
+ return app['id']
def getcvname(app):
- return '%s (%s)' % (app['Current Version'], app['Current Version Code'])
+ return '%s (%s)' % (app['Current Version'], app['Current Version Code'])
def getvcs(vcstype, remote, local):
if vcstype == 'git':
def userargs(self):
if self.username is None:
return ['--non-interactive']
- return ['--username', self.username,
+ return ['--username', self.username,
'--password', self.password,
'--non-interactive']
if flavour:
possible_manifests.append(
os.path.join(app_dir, 'src', flavour, 'AndroidManifest.xml'))
-
+
return [path for path in possible_manifests if os.path.isfile(path)]
# Retrieve the package name
name_search = re.compile(r'.*android:label="([^"]+)".*').search
app_found = False
for f in manifest_paths(app_dir, flavour):
- if not f.endswith(".xml"):
+ if not has_extension(f, 'xml'):
continue
xml_dir = os.path.join(f[:-19], 'res', 'values')
for line in file(f):
# Retrieve the version name
def version_name(original, app_dir, flavour):
for f in manifest_paths(app_dir, flavour):
- if not f.endswith(".xml"):
+ if not has_extension(f, 'xml'):
continue
xml_dir = os.path.join(f[:-19], 'res', 'values')
string = retrieve_string(xml_dir, original)
vnsearch = re.compile(r'.*android:versionName="([^"]+?)".*').search
psearch = re.compile(r'.*package="([^"]+)".*').search
- vcsearch_g = re.compile(r'.*versionCode[ =]*([0-9]+?)[^\d].*').search
- vnsearch_g = re.compile(r'.*versionName[ =]*"([^"]+?)".*').search
- psearch_g = re.compile(r'.*packageName[ =]*"([^"]+)".*').search
+ vcsearch_g = re.compile(r'.*versionCode[ ]*[=]*[ ]*["\']*([0-9]+)["\']*').search
+ vnsearch_g = re.compile(r'.*versionName[ ]*[=]*[ ]*(["\'])((?:(?=(\\?))\3.)*?)\1.*').search
+ psearch_g = re.compile(r'.*packageName[ ]*[=]*[ ]*["\']([^"]+)["\'].*').search
max_version = None
max_vercode = None
for path in paths:
- gradle = path.endswith("gradle")
+ gradle = has_extension(path, 'gradle')
version = None
vercode = None
# Remember package name, may be defined separately from version+vercode
else:
matches = vnsearch(line)
if matches:
- version = matches.group(1)
+ version = matches.group(2 if gradle else 1)
if not vercode:
if gradle:
matches = vcsearch_g(line)
if p.returncode != 0:
raise BuildException("Error running prepare command for srclib %s"
% name, p.stdout, p.stderr)
-
+
if srclib["Update Project"] == "Yes":
print "Updating srclib %s at path %s" % (name, libdir)
cmd = [os.path.join(config['sdk_path'], 'tools', 'android'),
# Generate (or update) the ant build file, build.xml...
updatemode = build.get('update', 'auto')
- if (updatemode != 'no'
- and build.get('maven', 'no') == 'no'
- and build.get('kivy', 'no') == 'no'
- and build.get('gradle', 'no') == 'no'):
+ if (updatemode != 'no' and build['type'] == 'ant'):
parms = [os.path.join(config['sdk_path'], 'tools', 'android'),
'update', 'project']
if 'target' in build and build['target']:
os.remove(buildxml)
for d in update_dirs:
- # Remove gen and bin dirs in libraries
- # rid of them...
- for baddir in [
- 'gen', 'bin', 'obj', # ant
- 'libs/armeabi-v7a', 'libs/armeabi', # jni
- 'libs/mips', 'libs/x86']:
- badpath = os.path.join(root_dir, d, baddir)
- if os.path.exists(badpath):
- print "Removing '%s'" % badpath
- shutil.rmtree(badpath)
+ subdir = os.path.join(root_dir, d)
+ # Clean update dirs via ant
+ p = FDroidPopen(['ant', 'clean'], cwd=subdir)
dparms = parms + ['-p', d]
if options.verbose:
if d == '.':
f.close()
flavour = None
- if build.get('gradle', 'no') != 'no':
+ if build['type'] == 'gradle':
flavour = build['gradle'].split('@')[0]
if flavour in ['main', 'yes', '']:
flavour = None
for path in manifest_paths(root_dir, flavour):
if not os.path.isfile(path):
continue
- if path.endswith('.xml'):
+ if has_extension(path, 'xml'):
if subprocess.call(['sed','-i',
's/android:versionName="[^"]*"/android:versionName="' + build['version'] + '"/g',
path]) != 0:
raise BuildException("Failed to amend manifest")
- elif path.endswith('.gradle'):
+ elif has_extension(path, 'gradle'):
if subprocess.call(['sed','-i',
's/versionName[ ]*=[ ]*"[^"]*"/versionName = "' + build['version'] + '"/g',
path]) != 0:
for path in manifest_paths(root_dir, flavour):
if not os.path.isfile(path):
continue
- if path.endswith('.xml'):
+ if has_extension(path, 'xml'):
if subprocess.call(['sed','-i',
's/android:versionCode="[^"]*"/android:versionCode="' + build['vercode'] + '"/g',
path]) != 0:
raise BuildException("Failed to amend manifest")
- elif path.endswith('.gradle'):
+ elif has_extension(path, 'gradle'):
if subprocess.call(['sed','-i',
's/versionCode[ ]*=[ ]*[0-9]*/versionCode = ' + build['vercode'] + '/g',
path]) != 0:
if build['fixapos']:
for root, dirs, files in os.walk(os.path.join(root_dir, 'res')):
for filename in files:
- if filename.endswith('.xml'):
+ if has_extension(filename, 'xml'):
if subprocess.call(['sed','-i','s@' +
r"\([^\\]\)'@\1\\'" +
'@g',
if build['fixtrans']:
for root, dirs, files in os.walk(os.path.join(root_dir, 'res')):
for filename in files:
- if filename.endswith('.xml'):
+ if has_extension(filename, 'xml'):
f = open(os.path.join(root, filename))
changed = False
outlines = []
return (root_dir, srclibpaths)
-
# Scan the source code in the given directory (and all subdirectories)
# and return a list of potential problems.
def scan_source(build_dir, root_dir, thisbuild):
def removeproblem(what, fd, fp):
print 'Removing %s at %s' % (what, fd)
os.remove(fp)
-
+
def handleproblem(what, fd, fp):
if todelete(fd):
removeproblem(what, fd, fp)
else:
problems.append('Found %s at %s' % (what, fd))
+ def warnproblem(what, fd, fp):
+ print 'Warning: Found %s at %s' % (what, fd)
+
# Iterate through all files in the source code...
for r,d,f in os.walk(build_dir):
for curfile in f:
handleproblem('static library', fd, fp)
elif mime == 'application/x-executable':
handleproblem('binary executable', fd, fp)
- elif mime == 'application/jar' and fp.endswith('.apk'):
+ elif mime == 'application/jar' and has_extension(fp, 'apk'):
removeproblem('APK file', fd, fp)
+ elif mime == 'application/jar' and has_extension(fp, 'jar'):
+ warnproblem('JAR file', fd, fp)
- elif curfile.endswith('.java'):
+ elif has_extension(fp, 'java'):
for line in file(fp):
if 'DexClassLoader' in line:
handleproblem('DexClassLoader', fd, fp)
# Presence of a jni directory without buildjni=yes might
# indicate a problem... (if it's not a problem, explicitly use
# buildjni=no to bypass this check)
- if (os.path.exists(os.path.join(root_dir, 'jni')) and
+ if (os.path.exists(os.path.join(root_dir, 'jni')) and
thisbuild.get('buildjni') is None):
msg = 'Found jni directory, but buildjni is not enabled'
problems.append(msg)
print "ERROR: Failed to get apk manifest information"
sys.exit(1)
for line in output.splitlines():
- if line.find('android:debuggable') != -1 and not line.endswith('0x0'):
+ if 'android:debuggable' in line and not line.endswith('0x0'):
return True
return False
in a separate thread. Pushes read lines on a queue to
be consumed in another thread.
'''
-
+
def __init__(self, fd, queue):
assert isinstance(queue, Queue.Queue)
assert callable(fd.readline)
threading.Thread.__init__(self)
self._fd = fd
self._queue = queue
-
+
def run(self):
'''The body of the tread: read lines and put them on the queue.'''
for line in iter(self._fd.readline, ''):
self._queue.put(line)
-
+
def eof(self):
'''Check whether there is no more content to expect.'''
return not self.is_alive() and self._queue.empty()
"""
Runs a command the FDroid way and returns return code and output
- :param commands, cwd: like subprocess.Popen
+ :param commands and cwd like in subprocess.Popen
"""
if options.verbose:
result = PopenResult()
p = subprocess.Popen(commands, cwd=cwd,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-
+
stdout_queue = Queue.Queue()
stdout_reader = AsynchronousFileReader(p.stdout, stdout_queue)
stdout_reader.start()
stderr_queue = Queue.Queue()
stderr_reader = AsynchronousFileReader(p.stderr, stderr_queue)
stderr_reader.start()
-
+
# Check the queues for output (until there is no more to get)
while not stdout_reader.eof() or not stderr_reader.eof():
# Show what we received from standard output
sys.stderr.write(line)
sys.stderr.flush()
result.stderr += line
- time.sleep(0.2)
+ time.sleep(0.1)
p.communicate()
result.returncode = p.returncode
with open(path, "r") as o:
lines = o.readlines()
-
+
opened = 0
with open(path, "w") as o:
for line in lines:
import java.util.jar.JarFile;
public class getsig {
-
+
public static void main(String[] args) {
String apkPath = null;
System.out.println("Specify the APK file to get the signature from!");
System.exit(1);
}
-
+
try {
JarFile apk = new JarFile(apkPath);
java.security.cert.Certificate[] certs = null;
-
+
Enumeration entries = apk.entries();
while (entries.hasMoreElements()) {
JarEntry je = (JarEntry) entries.nextElement();
#
# import.py - part of the FDroid server tools
# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
+# Copyright (C) 2013 Daniel Martí <mvdan@mvdan.cc>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
else:
spec = os.path.join(root_dir, 'buildozer.spec')
if os.path.exists(spec):
- defaults = {'orientation': 'landscape', 'icon': '',
+ defaults = {'orientation': 'landscape', 'icon': '',
'permissions': '', 'android.api': "18"}
bconfig = ConfigParser(defaults, allow_no_value=True)
bconfig.read(spec)
--- /dev/null
+#!/usr/bin/env python2
+# -*- coding: utf-8 -*-
+#
+# install.py - part of the FDroid server tools
+# Copyright (C) 2013, Ciaran Gultnieks, ciaran@ciarang.com
+# Copyright (C) 2013 Daniel Martí <mvdan@mvdan.cc>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import sys
+import os
+import glob
+from optparse import OptionParser, OptionError
+
+import common
+from common import FDroidPopen
+
+options = None
+config = None
+
+def devices():
+ p = FDroidPopen(["adb", "devices"])
+ if p.returncode != 0:
+ raise Exception("An error occured when finding devices: %s" % p.stderr)
+ lines = p.stdout.splitlines()
+ if lines[0].startswith('* daemon not running'):
+ lines = lines[2:]
+ if len(lines) < 3:
+ return []
+ lines = lines[1:-1]
+ return [l.split()[0] for l in lines]
+
+
+def main():
+
+ global options, config
+
+ # Parse command line...
+ parser = OptionParser(usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
+ parser.add_option("-v", "--verbose", action="store_true", default=False,
+ help="Spew out even more information than normal")
+ parser.add_option("-a", "--all", action="store_true", default=False,
+ help="Install all signed applications available")
+ (options, args) = parser.parse_args()
+
+ if not args and not options.all:
+ raise OptionError("If you really want to install all the signed apps, use --all", "all")
+
+ config = common.read_config(options)
+
+ output_dir = 'repo'
+ if not os.path.isdir(output_dir):
+ print "No signed output directory - nothing to do"
+ sys.exit(0)
+
+ if args:
+
+ vercodes = common.read_pkg_args(args, True)
+ apks = { appid : None for appid in vercodes }
+
+ # Get the signed apk with the highest vercode
+ for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))):
+
+ appid, vercode = common.apknameinfo(apkfile)
+ if appid not in apks:
+ continue
+ if vercodes[appid] and vercode not in vercodes[appid]:
+ continue
+ apks[appid] = apkfile
+
+ for appid, apk in apks.iteritems():
+ if not apk:
+ raise Exception("No signed apk available for %s" % appid)
+
+ else:
+
+ apks = { common.apknameinfo(apkfile)[0] : apkfile for apkfile in
+ sorted(glob.glob(os.path.join(output_dir, '*.apk'))) }
+
+ for appid, apk in apks.iteritems():
+ # Get device list each time to avoid device not found errors
+ devs = devices()
+ if not devs:
+ raise Exception("No attached devices found")
+ print "Installing %s..." % apk
+ for dev in devs:
+ print "Installing %s on %s..." % (apk, dev)
+ p = FDroidPopen(["adb", "-s", dev, "install", apk ])
+ fail= ""
+ for line in p.stdout.splitlines():
+ if line.startswith("Failure"):
+ fail = line[9:-1]
+ if fail:
+ if fail == "INSTALL_FAILED_ALREADY_EXISTS":
+ print "%s is already installed on %s." % (apk, dev)
+ else:
+ raise Exception("Failed to install %s on %s: %s" % (
+ apk, dev, fail))
+
+ print "\nFinished"
+
+if __name__ == "__main__":
+ main()
+
--- /dev/null
+#!/usr/bin/env python2
+# -*- coding: utf-8 -*-
+#
+# lint.py - part of the FDroid server tools
+# Copyright (C) 2010-12, Ciaran Gultnieks, ciaran@ciarang.com
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from optparse import OptionParser
+import common, metadata
+
+config = None
+options = None
+
+appid = None
+
+def warn(message):
+    # Print a lint warning. The current app id (module-level `appid`) is
+    # printed as a header before the first warning for that app, then
+    # cleared so the header appears only once per app.
+    global appid
+    if appid:
+        print "%s:" % appid
+        appid = None
+    print(' %s' % message)
+
+def main():
+    # Run lint-style sanity checks over the metadata of all apps (or only
+    # the APPIDs given as arguments) and print a warning for each problem
+    # found: tag-like commits with RepoManifest checking, over-long
+    # summaries/descriptions, and summaries ending in punctuation.
+
+    global config, options, appid
+
+    # Parse command line...
+    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
+    parser.add_option("-v", "--verbose", action="store_true", default=False,
+                      help="Spew out even more information than normal")
+    (options, args) = parser.parse_args()
+
+    config = common.read_config(options)
+
+    # Get all apps...
+    allapps = metadata.read_metadata(xref=False)
+    apps = common.read_app_args(args, allapps, False)
+
+    for app in apps:
+        appid = app['id']
+        lastcommit = ''
+
+        # Remember the most recent commit among the enabled builds
+        for build in app['builds']:
+            if 'commit' in build and 'disable' not in build:
+                lastcommit = build['commit']
+
+        # RepoManifest update checking follows the repo head, so a
+        # tag-looking commit reference is suspicious
+        if (app['Update Check Mode'] == 'RepoManifest' and
+            any(s in lastcommit for s in ('.', ',', '_', '-', '/'))):
+            warn("Last used commit '%s' looks like a tag, but Update Check Mode is RepoManifest" % lastcommit)
+
+        # Enforce the configured character limit on the summary
+        summ_chars = len(app['Summary'])
+        if summ_chars > config['char_limits']['Summary']:
+            warn("Summary of length %s is over the %i char limit" % (
+                summ_chars, config['char_limits']['Summary']))
+
+        if app['Summary']:
+            lastchar = app['Summary'][-1]
+            if any(lastchar==c for c in ['.', ',', '!', '?']):
+                warn("Summary should not end with a %s" % lastchar)
+
+        # Description length is the sum of all of its lines
+        desc_chars = 0
+        for line in app['Description']:
+            desc_chars += len(line)
+        if desc_chars > config['char_limits']['Description']:
+            warn("Description of length %s is over the %i char limit" % (
+                desc_chars, config['char_limits']['Description']))
+
+        # warn() clears appid once it prints something; if it was cleared,
+        # this app produced warnings, so separate it with a blank line
+        if not appid:
+            print
+
+    print "Finished."
+
+if __name__ == "__main__":
+ main()
+
def __str__(self):
return repr(self.value)
-# Designates a metadata field type and checks that it matches
+# Designates a metadata field type and checks that it matches
#
# 'name' - The long name of the field type
# 'matching' - List of possible values or regex expression
class FieldType():
def __init__(self, name, matching, sep, fields, attrs):
self.name = name
+ self.matching = matching
if type(matching) is str:
- self.matching = re.compile(matching)
- elif type(matching) is list:
- self.matching = matching
+ self.compiled = re.compile(matching)
self.sep = sep
self.fields = fields
self.attrs = attrs
def _assert_regex(self, values, appid):
for v in values:
- if not self.matching.match(v):
- raise MetaDataException("'%s' is not a valid %s in %s"
- % (v, self.name, appid))
+ if not self.compiled.match(v):
+ raise MetaDataException("'%s' is not a valid %s in %s. "
+ % (v, self.name, appid) +
+ "Regex pattern: %s" % (self.matching))
def _assert_list(self, values, appid):
for v in values:
if v not in self.matching:
- raise MetaDataException("'%s' is not a valid %s in %s"
- % (v, self.name, appid))
+ raise MetaDataException("'%s' is not a valid %s in %s. "
+ % (v, self.name, appid) +
+ "Possible values: %s" % (", ".join(self.matching)))
def check(self, value, appid):
if type(value) is not str or not value:
# Generic value types
valuetypes = {
'int' : FieldType("Integer",
- r'^[0-9]+$', None,
+ r'^[1-9][0-9]*$', None,
[ 'FlattrID' ],
[ 'vercode' ]),
[ "Litecoin" ],
[ ]),
+ 'dogecoin' : FieldType("Dogecoin address",
+ r'^D[a-zA-Z0-9]{33}$', None,
+ [ "Dogecoin" ],
+ [ ]),
+
'Bool' : FieldType("Boolean",
['Yes', 'No'], None,
[ "Requires Root" ],
if not line or line.startswith("#"):
continue
- index = line.find(':')
- if index == -1:
+ try:
+ field, value = line.split(':',1)
+ except ValueError:
raise MetaDataException("Invalid metadata in " + metafile.name + " at: " + line)
- field = line[:index]
- value = line[index+1:]
if field == "Subdir":
thisinfo[field] = value.split(',')
thisinfo['comments'].append((key, comment))
del curcomments[:]
+ def get_build_type(build):
+ for t in ['maven', 'gradle', 'kivy']:
+ if build.get(t, 'no') != 'no':
+ return t
+ return 'ant'
thisinfo = {}
if metafile:
# Defaults for fields that come from metadata...
thisinfo['Name'] = None
+ thisinfo['Provides'] = None
thisinfo['Auto Name'] = ''
thisinfo['Categories'] = 'None'
thisinfo['Description'] = []
thisinfo['FlattrID'] = None
thisinfo['Bitcoin'] = None
thisinfo['Litecoin'] = None
+ thisinfo['Dogecoin'] = None
thisinfo['Disabled'] = None
thisinfo['AntiFeatures'] = None
thisinfo['Archive Policy'] = None
if line.startswith("#"):
curcomments.append(line)
continue
- index = line.find(':')
- if index == -1:
+ try:
+ field, value = line.split(':',1)
+ except ValueError:
raise MetaDataException("Invalid metadata in " + metafile.name + " at: " + line)
- field = line[:index]
- value = line[index+1:]
# Translate obsolete fields...
if field == 'Market Version':
if not thisinfo['Description']:
thisinfo['Description'].append('No description available')
+ for build in thisinfo['builds']:
+ build['type'] = get_build_type(build)
+
return thisinfo
# Write a metadata file.
writefield('Disabled')
if app['AntiFeatures']:
writefield('AntiFeatures')
+ if app['Provides']:
+ writefield('Provides')
writefield('Categories')
writefield('License')
writefield('Web Site')
writefield('Bitcoin')
if app['Litecoin']:
writefield('Litecoin')
+ if app['Dogecoin']:
+ writefield('Dogecoin')
mf.write('\n')
if app['Name']:
writefield('Name')
'preassemble', 'bindir', 'antcommand', 'novcheck']
def write_builditem(key, value):
- if key in ['version', 'vercode', 'origlines']:
+ if key in ['version', 'vercode', 'origlines', 'type']:
return
if key in valuetypes['bool'].attrs:
if not value:
global config, options
# Parse command line...
- parser = OptionParser()
+ parser = OptionParser(usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
parser.add_option("-v", "--verbose", action="store_true", default=False,
help="Spew out even more information than normal")
- parser.add_option("-p", "--package", default=None,
- help="Publish only the specified package")
(options, args) = parser.parse_args()
config = common.read_config(options)
# and b) a sane-looking ID that would make its way into the repo.
# Nonetheless, to be sure, before publishing we check that there are no
# collisions, and refuse to do any publishing if that's the case...
- apps = metadata.read_metadata()
+ allapps = metadata.read_metadata()
+ vercodes = common.read_pkg_args(args, True)
allaliases = []
- for app in apps:
+ for app in allapps:
m = md5.new()
m.update(app['id'])
keyalias = m.hexdigest()[:8]
sys.exit(1)
allaliases.append(keyalias)
if options.verbose:
- print "{0} apps, {0} key aliases".format(len(apps), len(allaliases))
+ print "{0} apps, {0} key aliases".format(len(allapps), len(allaliases))
# Process any apks that are waiting to be signed...
for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):
+ appid, vercode = common.apknameinfo(apkfile)
apkfilename = os.path.basename(apkfile)
- i = apkfilename.rfind('_')
- if i == -1:
- raise BuildException("Invalid apk name")
- appid = apkfilename[:i]
- print "Processing " + appid
-
- if not options.package or options.package == appid:
-
- # Figure out the key alias name we'll use. Only the first 8
- # characters are significant, so we'll use the first 8 from
- # the MD5 of the app's ID and hope there are no collisions.
- # If a collision does occur later, we're going to have to
- # come up with a new alogrithm, AND rename all existing keys
- # in the keystore!
- if appid in config['keyaliases']:
- # For this particular app, the key alias is overridden...
- keyalias = config['keyaliases'][appid]
- if keyalias.startswith('@'):
- m = md5.new()
- m.update(keyalias[1:])
- keyalias = m.hexdigest()[:8]
- else:
+ if vercodes and appid not in vercodes:
+ continue
+ if appid in vercodes and vercodes[appid]:
+ if vercode not in vercodes[appid]:
+ continue
+ print "Processing " + apkfile
+
+ # Figure out the key alias name we'll use. Only the first 8
+ # characters are significant, so we'll use the first 8 from
+ # the MD5 of the app's ID and hope there are no collisions.
+ # If a collision does occur later, we're going to have to
+ # come up with a new alogrithm, AND rename all existing keys
+ # in the keystore!
+ if appid in config['keyaliases']:
+ # For this particular app, the key alias is overridden...
+ keyalias = config['keyaliases'][appid]
+ if keyalias.startswith('@'):
m = md5.new()
- m.update(appid)
+ m.update(keyalias[1:])
keyalias = m.hexdigest()[:8]
- print "Key alias: " + keyalias
-
- # See if we already have a key for this application, and
- # if not generate one...
- p = subprocess.Popen(['keytool', '-list',
- '-alias', keyalias, '-keystore', config['keystore'],
- '-storepass', config['keystorepass']], stdout=subprocess.PIPE)
- output = p.communicate()[0]
- if p.returncode !=0:
- print "Key does not exist - generating..."
- p = subprocess.Popen(['keytool', '-genkey',
- '-keystore', config['keystore'], '-alias', keyalias,
- '-keyalg', 'RSA', '-keysize', '2048',
- '-validity', '10000',
- '-storepass', config['keystorepass'],
- '-keypass', config['keypass'],
- '-dname', config['keydname']], stdout=subprocess.PIPE)
- output = p.communicate()[0]
- print output
- if p.returncode != 0:
- raise BuildException("Failed to generate key")
-
- # Sign the application...
- p = subprocess.Popen(['jarsigner', '-keystore', config['keystore'],
+ else:
+ m = md5.new()
+ m.update(appid)
+ keyalias = m.hexdigest()[:8]
+ print "Key alias: " + keyalias
+
+ # See if we already have a key for this application, and
+ # if not generate one...
+ p = subprocess.Popen(['keytool', '-list',
+ '-alias', keyalias, '-keystore', config['keystore'],
+ '-storepass', config['keystorepass']], stdout=subprocess.PIPE)
+ output = p.communicate()[0]
+ if p.returncode !=0:
+ print "Key does not exist - generating..."
+ p = subprocess.Popen(['keytool', '-genkey',
+ '-keystore', config['keystore'], '-alias', keyalias,
+ '-keyalg', 'RSA', '-keysize', '2048',
+ '-validity', '10000',
'-storepass', config['keystorepass'],
- '-keypass', config['keypass'], '-sigalg',
- 'MD5withRSA', '-digestalg', 'SHA1',
- apkfile, keyalias], stdout=subprocess.PIPE)
- output = p.communicate()[0]
- print output
- if p.returncode != 0:
- raise BuildException("Failed to sign application")
-
- # Zipalign it...
- p = subprocess.Popen([os.path.join(config['sdk_path'],'tools','zipalign'),
- '-v', '4', apkfile,
- os.path.join(output_dir, apkfilename)],
- stdout=subprocess.PIPE)
+ '-keypass', config['keypass'],
+ '-dname', config['keydname']], stdout=subprocess.PIPE)
output = p.communicate()[0]
print output
if p.returncode != 0:
- raise BuildException("Failed to align application")
- os.remove(apkfile)
-
- # Move the source tarball into the output directory...
- tarfilename = apkfilename[:-4] + '_src.tar.gz'
- shutil.move(os.path.join(unsigned_dir, tarfilename),
- os.path.join(output_dir, tarfilename))
-
- print 'Published ' + apkfilename
+ raise BuildException("Failed to generate key")
+
+ # Sign the application...
+ p = subprocess.Popen(['jarsigner', '-keystore', config['keystore'],
+ '-storepass', config['keystorepass'],
+ '-keypass', config['keypass'], '-sigalg',
+ 'MD5withRSA', '-digestalg', 'SHA1',
+ apkfile, keyalias], stdout=subprocess.PIPE)
+ output = p.communicate()[0]
+ print output
+ if p.returncode != 0:
+ raise BuildException("Failed to sign application")
+
+ # Zipalign it...
+ p = subprocess.Popen([os.path.join(config['sdk_path'],'tools','zipalign'),
+ '-v', '4', apkfile,
+ os.path.join(output_dir, apkfilename)],
+ stdout=subprocess.PIPE)
+ output = p.communicate()[0]
+ print output
+ if p.returncode != 0:
+ raise BuildException("Failed to align application")
+ os.remove(apkfile)
+
+ # Move the source tarball into the output directory...
+ tarfilename = apkfilename[:-4] + '_src.tar.gz'
+ shutil.move(os.path.join(unsigned_dir, tarfilename),
+ os.path.join(output_dir, tarfilename))
+
+ print 'Published ' + apkfilename
if __name__ == "__main__":
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import sys
import os
from optparse import OptionParser
import common, metadata
global config, options
# Parse command line...
- parser = OptionParser()
+ parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
parser.add_option("-v", "--verbose", action="store_true", default=False,
help="Spew out even more information than normal")
- parser.add_option("-p", "--package", default=None,
- help="Process only the specified package")
(options, args) = parser.parse_args()
config = common.read_config(options)
# Get all apps...
- apps = metadata.read_metadata(package=options.package, xref=False)
-
- if len(apps) == 0 and options.package:
- print "No such package"
- sys.exit(1)
+ allapps = metadata.read_metadata(xref=False)
+ apps = common.read_app_args(args, allapps, False)
for app in apps:
print "Writing " + app['id']
- metadata.write_metadata(os.path.join('metadata', app['id']) + '.txt', app)
+ metadata.write_metadata(os.path.join('metadata', app['id'])+'.txt', app)
print "Finished."
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import sys
import os
import traceback
from optparse import OptionParser
global config, options
# Parse command line...
- parser = OptionParser()
+ parser = OptionParser(usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
parser.add_option("-v", "--verbose", action="store_true", default=False,
help="Spew out even more information than normal")
- parser.add_option("-p", "--package", default=None,
- help="Scan only the specified package")
parser.add_option("--nosvn", action="store_true", default=False,
help="Skip svn repositories - for test purposes, because they are too slow.")
(options, args) = parser.parse_args()
config = common.read_config(options)
# Get all apps...
- apps = metadata.read_metadata()
-
- # Filter apps according to command-line options
- if options.package:
- apps = [app for app in apps if app['id'] == options.package]
- if len(apps) == 0:
- print "No such package"
- sys.exit(1)
+ allapps = metadata.read_metadata()
+ apps = common.read_app_args(args, allapps, True)
problems = []
for app in apps:
- skip = False
if app['Disabled']:
print "Skipping %s: disabled" % app['id']
- skip = True
- elif not app['builds']:
+ continue
+ if not app['builds']:
print "Skipping %s: no builds specified" % app['id']
- skip = True
+ continue
elif options.nosvn and app['Repo Type'] == 'svn':
- skip = True
-
- if not skip:
+ continue
- print "Processing " + app['id']
+ print "Processing " + app['id']
- try:
+ try:
- build_dir = 'build/' + app['id']
+ build_dir = 'build/' + app['id']
- # Set up vcs interface and make sure we have the latest code...
- vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)
+ # Set up vcs interface and make sure we have the latest code...
+ vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)
- for thisbuild in app['builds']:
+ for thisbuild in app['builds']:
- if 'disable' in thisbuild:
- print ("..skipping version " + thisbuild['version'] + " - " +
- thisbuild.get('disable', thisbuild['commit'][1:]))
- else:
- print "..scanning version " + thisbuild['version']
+ if 'disable' in thisbuild:
+ print ("..skipping version " + thisbuild['version'] + " - " +
+ thisbuild.get('disable', thisbuild['commit'][1:]))
+ else:
+ print "..scanning version " + thisbuild['version']
- # Prepare the source code...
- root_dir, _ = common.prepare_source(vcs, app, thisbuild,
- build_dir, srclib_dir, extlib_dir, False)
+ # Prepare the source code...
+ root_dir, _ = common.prepare_source(vcs, app, thisbuild,
+ build_dir, srclib_dir, extlib_dir, False)
- # Do the scan...
- buildprobs = common.scan_source(build_dir, root_dir, thisbuild)
- for problem in buildprobs:
- problems.append(problem +
- ' in ' + app['id'] + ' ' + thisbuild['version'])
+ # Do the scan...
+ buildprobs = common.scan_source(build_dir, root_dir, thisbuild)
+ for problem in buildprobs:
+ problems.append(problem +
+ ' in ' + app['id'] + ' ' + thisbuild['version'])
- except BuildException as be:
- msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be)
- problems.append(msg)
- except VCSException as vcse:
- msg = "VCS error while scanning app %s: %s" % (app['id'], vcse)
- problems.append(msg)
- except Exception:
- msg = "Could not scan app %s due to unknown error: %s" % (app['id'], traceback.format_exc())
- problems.append(msg)
+ except BuildException as be:
+ msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be)
+ problems.append(msg)
+ except VCSException as vcse:
+ msg = "VCS error while scanning app %s: %s" % (app['id'], vcse)
+ problems.append(msg)
+ except Exception:
+ msg = "Could not scan app %s due to unknown error: %s" % (app['id'], traceback.format_exc())
+ problems.append(msg)
print "Finished:"
for problem in problems:
help="Spew out even more information than normal")
parser.add_option("-d", "--download", action="store_true", default=False,
help="Download logs we don't have")
+ parser.add_option("--nologs", action="store_true", default=False,
+ help="Don't do anything logs-related")
(options, args) = parser.parse_args()
config = common.read_config(options)
if ssh is not None:
ssh.close()
- # Process logs
- if options.verbose:
- print 'Processing logs...'
- logexpr = '(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] "GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) \d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
- logsearch = re.compile(logexpr).search
- apps = {}
- unknownapks = []
knownapks = common.KnownApks()
- for logfile in glob.glob(os.path.join(logsdir,'access-*.log.gz')):
+ unknownapks = []
+
+ if not options.nologs:
+ # Process logs
if options.verbose:
- print '...' + logfile
- p = subprocess.Popen(["zcat", logfile], stdout = subprocess.PIPE)
- matches = (logsearch(line) for line in p.stdout)
- for match in matches:
- if match and match.group('statuscode') == '200':
- uri = match.group('uri')
- if uri.endswith('.apk'):
- _, apkname = os.path.split(uri)
- app = knownapks.getapp(apkname)
- if app:
- appid, _ = app
- if appid in apps:
- apps[appid] += 1
+ print 'Processing logs...'
+ apps = {}
+ logexpr = '(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] "GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) \d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
+ logsearch = re.compile(logexpr).search
+ for logfile in glob.glob(os.path.join(logsdir,'access-*.log.gz')):
+ if options.verbose:
+ print '...' + logfile
+ p = subprocess.Popen(["zcat", logfile], stdout = subprocess.PIPE)
+ matches = (logsearch(line) for line in p.stdout)
+ for match in matches:
+ if match and match.group('statuscode') == '200':
+ uri = match.group('uri')
+ if uri.endswith('.apk'):
+ _, apkname = os.path.split(uri)
+ app = knownapks.getapp(apkname)
+ if app:
+ appid, _ = app
+ if appid in apps:
+ apps[appid] += 1
+ else:
+ apps[appid] = 1
else:
- apps[appid] = 1
- else:
- if not apkname in unknownapks:
- unknownapks.append(apkname)
-
- # Calculate and write stats for total downloads...
- lst = []
- alldownloads = 0
- for app, count in apps.iteritems():
- lst.append(app + " " + str(count))
- if config['stats_to_carbon']:
- carbon_send('fdroid.download.' + app.replace('.', '_'), count)
- alldownloads += count
- lst.append("ALL " + str(alldownloads))
- f = open('stats/total_downloads_app.txt', 'w')
- f.write('# Total downloads by application, since October 2011\n')
- for line in sorted(lst):
- f.write(line + '\n')
- f.close()
+ if not apkname in unknownapks:
+ unknownapks.append(apkname)
+
+ # Calculate and write stats for total downloads...
+ lst = []
+ alldownloads = 0
+ for app, count in apps.iteritems():
+ lst.append(app + " " + str(count))
+ if config['stats_to_carbon']:
+ carbon_send('fdroid.download.' + app.replace('.', '_'), count)
+ alldownloads += count
+ lst.append("ALL " + str(alldownloads))
+ f = open('stats/total_downloads_app.txt', 'w')
+ f.write('# Total downloads by application, since October 2011\n')
+ for line in sorted(lst):
+ f.write(line + '\n')
+ f.close()
# Calculate and write stats for repo types...
+ if options.verbose:
+ print "Processing repo types..."
repotypes = {}
for app in metaapps:
if len(app['Repo Type']) == 0:
f.close()
# Calculate and write stats for update check modes...
+ if options.verbose:
+ print "Processing update check modes..."
ucms = {}
for app in metaapps:
checkmode = app['Update Check Mode'].split('/')[0]
f.write(checkmode + ' ' + str(count) + '\n')
f.close()
+ if options.verbose:
+ print "Processing categories..."
ctgs = {}
for app in metaapps:
if app['Categories'] is None:
f.write(category + ' ' + str(count) + '\n')
f.close()
+ if options.verbose:
+ print "Processing antifeatures..."
afs = {}
for app in metaapps:
if app['AntiFeatures'] is None:
for antifeature, count in afs.iteritems():
f.write(antifeature + ' ' + str(count) + '\n')
f.close()
- return
# Calculate and write stats for licenses...
+ if options.verbose:
+ print "Processing licenses..."
licenses = {}
for app in metaapps:
license = app['License']
f.close()
# Write list of latest apps added to the repo...
+ if options.verbose:
+ print "Processing latest apps..."
latest = knownapks.getlatest(10)
f = open('stats/latestapps.txt', 'w')
for app in latest:
f.write(app + '\n')
f.close()
- if len(unknownapks) > 0:
+ if unknownapks:
print '\nUnknown apks:'
for apk in unknownapks:
print apk
from metadata import MetaDataException
from PIL import Image
+
+def get_densities():
+ return ['640', '480', '320', '240', '160', '120']
+
+def dpi_to_px(density):
+ return (int(density) * 48) / 160
+
+def px_to_dpi(px):
+ return (int(px) * 160) / 48
+
+def get_icon_dir(repodir, density):
+ if density is None:
+ return os.path.join(repodir, "icons")
+ return os.path.join(repodir, "icons-%s" % density)
+
+def get_icon_dirs(repodir):
+ for density in get_densities():
+ yield get_icon_dir(repodir, density)
+ yield os.path.join(repodir, "icons")
+
def update_wiki(apps, apks):
"""Update the wiki
if app['AntiFeatures']:
for af in app['AntiFeatures'].split(','):
wikidata += '{{AntiFeature|' + af + '}}\n'
- wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|donate=%s|flattr=%s|bitcoin=%s|litecoin=%s|license=%s|root=%s}}\n'%(
+ wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|donate=%s|flattr=%s|bitcoin=%s|litecoin=%s|dogecoin=%s|license=%s|root=%s}}\n'%(
app['id'],
app['Name'],
time.strftime('%Y-%m-%d', app['added']) if 'added' in app else '',
app['FlattrID'],
app['Bitcoin'],
app['Litecoin'],
+ app['Dogecoin'],
app['License'],
app.get('Requires Root', 'No'))
+ if app['Provides']:
+ wikidata += "This app provides: %s" % ', '.join(app['Summary'].split(','))
+
wikidata += app['Summary']
wikidata += " - [http://f-droid.org/repository/browse/?fdid=" + app['id'] + " view in repository]\n\n"
wikidata += '\n[[Category:Apps that are disabled]]\n'
if app['Update Check Mode'] == 'None' and not app['Disabled']:
wikidata += '\n[[Category:Apps with no update check]]\n'
+ for appcat in [c.strip() for c in app['Categories'].split(',')]:
+ wikidata += '\n[[Category:{0}]]\n'.format(appcat)
# We can't have underscores in the page name, even if they're in
# the package ID, because MediaWiki messes with them...
if apkfilename in apkcache:
del apkcache[apkfilename]
-def resize_icon(iconpath):
+def resize_icon(iconpath, density):
+
+ if not os.path.isfile(iconpath):
+ return
+
try:
im = Image.open(iconpath)
- if any(length > config['max_icon_size'] for length in im.size):
- print iconpath, "is too large:", im.size
- im.thumbnail((config['max_icon_size'], config['max_icon_size']),
- Image.ANTIALIAS)
- print iconpath, "new size:", im.size
+ size = dpi_to_px(density)
+
+ if any(length > size for length in im.size):
+ oldsize = im.size
+ im.thumbnail((size, size), Image.ANTIALIAS)
+ print iconpath, "was too large at", oldsize, "- new size is", im.size
im.save(iconpath, "PNG")
+
else:
if options.verbose:
print iconpath, "is small enough:", im.size
+
except Exception,e:
- print "ERROR: Failed processing {0} - {1}".format(iconpath, e)
+ print "WARNING: Failed resizing {0} - {1}".format(iconpath, e)
def resize_all_icons(repodirs):
"""Resize all icons that exceed the max size
- :param apps: list of all applications, as per metadata.read_metadata
:param repodirs: the repo directories to process
"""
for repodir in repodirs:
- for iconpath in glob.glob(os.path.join(repodir, 'icons', '*.png')):
- resize_icon(iconpath)
+ for density in get_densities():
+ icon_dir = get_icon_dir(repodir, density)
+ icon_glob = os.path.join(icon_dir, '*.png')
+ for iconpath in glob.glob(icon_glob):
+ resize_icon(iconpath, density)
def scan_apks(apps, apkcache, repodir, knownapks):
"""Scan the apks in the given repo directory.
cachechanged = False
- icon_dir = os.path.join(repodir ,'icons')
- # Delete and re-create the icon directory...
- if options.clean and os.path.exists(icon_dir):
- shutil.rmtree(icon_dir)
- if not os.path.exists(icon_dir):
- os.makedirs(icon_dir)
+ icon_dirs = get_icon_dirs(repodir)
+ for icon_dir in icon_dirs:
+ if os.path.exists(icon_dir):
+ if options.clean:
+ shutil.rmtree(icon_dir)
+ os.makedirs(icon_dir)
+ else:
+ os.makedirs(icon_dir)
+
apks = []
name_pat = re.compile(".*name='([a-zA-Z0-9._]*)'.*")
vercode_pat = re.compile(".*versionCode='([0-9]*)'.*")
vername_pat = re.compile(".*versionName='([^']*)'.*")
label_pat = re.compile(".*label='(.*?)'(\n| [a-z]*?=).*")
- icon_pat = re.compile(".*icon='([^']+?)'.*")
+ icon_pat = re.compile(".*application-icon-([0-9]+):'([^']+?)'.*")
+ icon_pat_nodpi = re.compile(".*icon='([^']+?)'.*")
sdkversion_pat = re.compile(".*'([0-9]*)'.*")
string_pat = re.compile(".*'([^']*)'.*")
for apkfile in glob.glob(os.path.join(repodir, '*.apk')):
apkfilename = apkfile[len(repodir) + 1:]
- if apkfilename.find(' ') != -1:
+ if ' ' in apkfilename:
print "No spaces in APK filenames!"
sys.exit(1)
thisinfo['size'] = os.path.getsize(apkfile)
thisinfo['permissions'] = []
thisinfo['features'] = []
+ thisinfo['icons_src'] = {}
+ thisinfo['icons'] = {}
p = subprocess.Popen([os.path.join(config['sdk_path'], 'build-tools', config['build_tools'], 'aapt'),
'dump', 'badging', apkfile],
stdout=subprocess.PIPE)
output = p.communicate()[0]
- if options.verbose:
- print output
if p.returncode != 0:
print "ERROR: Failed to get apk information"
sys.exit(1)
sys.exit(1)
elif line.startswith("application:"):
thisinfo['name'] = re.match(label_pat, line).group(1)
+ # Keep path to non-dpi icon in case we need it
+ match = re.match(icon_pat_nodpi, line)
+ if match:
+ thisinfo['icons_src']['-1'] = match.group(1)
+ elif line.startswith("launchable-activity:"):
+ # Only use launchable-activity as fallback to application
+ if '-1' not in thisinfo['icons_src']:
+ match = re.match(icon_pat_nodpi, line)
+ if match:
+ thisinfo['icons_src']['-1'] = match.group(1)
+ elif line.startswith("application-icon-"):
match = re.match(icon_pat, line)
if match:
- thisinfo['iconsrc'] = match.group(1)
+ density = match.group(1)
+ path = match.group(2)
+ thisinfo['icons_src'][density] = path
elif line.startswith("sdkVersion:"):
thisinfo['sdkversion'] = re.match(sdkversion_pat, line).group(1)
elif line.startswith("native-code:"):
sys.exit(1)
thisinfo['sig'] = output[7:].strip()
+ apk = zipfile.ZipFile(apkfile, 'r')
+
+ iconfilename = "%s.%s.png" % (
+ thisinfo['id'],
+ thisinfo['versioncode'])
+
# Extract the icon file...
- if 'iconsrc' in thisinfo:
- apk = zipfile.ZipFile(apkfile, 'r')
- thisinfo['icon'] = (thisinfo['id'] + '.' +
- str(thisinfo['versioncode']) + '.png')
- iconpath = os.path.join(icon_dir, thisinfo['icon'])
+ densities = get_densities()
+ empty_densities = []
+ for density in densities:
+ if density not in thisinfo['icons_src']:
+ empty_densities.append(density)
+ continue
+ iconsrc = thisinfo['icons_src'][density]
+ icon_dir = get_icon_dir(repodir, density)
+ icondest = os.path.join(icon_dir, iconfilename)
+
try:
- iconfile = open(iconpath, 'wb')
- iconfile.write(apk.read(thisinfo['iconsrc']))
+ iconfile = open(icondest, 'wb')
+ iconfile.write(apk.read(iconsrc))
iconfile.close()
+ thisinfo['icons'][density] = iconfilename
+
except:
print "WARNING: Error retrieving icon file"
- apk.close()
+ del thisinfo['icons'][density]
+ del thisinfo['icons_src'][density]
+ empty_densities.append(density)
+
+ if '-1' in thisinfo['icons_src']:
+ iconsrc = thisinfo['icons_src']['-1']
+ iconpath = os.path.join(
+ get_icon_dir(repodir, None), iconfilename)
+ iconfile = open(iconpath, 'wb')
+ iconfile.write(apk.read(iconsrc))
+ iconfile.close()
+ try:
+ im = Image.open(iconpath)
+ dpi = px_to_dpi(im.size[0])
+ for density in densities:
+ if density in thisinfo['icons']:
+ break
+ if density == densities[-1] or dpi >= int(density):
+ thisinfo['icons'][density] = iconfilename
+ shutil.move(iconpath,
+ os.path.join(get_icon_dir(repodir, density), iconfilename))
+ empty_densities.remove(density)
+ break
+ except Exception,e:
+ print "WARNING: Failed reading {0} - {1}".format(iconpath, e)
+
+ if thisinfo['icons']:
+ thisinfo['icon'] = iconfilename
+
+ apk.close()
+
+ # First try resizing down to not lose quality
+ last_density = None
+ for density in densities:
+ if density not in empty_densities:
+ last_density = density
+ continue
+ if last_density is None:
+ continue
+ if options.verbose:
+ print "Density %s not available, resizing down from %s" % (
+ density, last_density)
+
+ last_iconpath = os.path.join(
+ get_icon_dir(repodir, last_density), iconfilename)
+ iconpath = os.path.join(
+ get_icon_dir(repodir, density), iconfilename)
+ im = Image.open(last_iconpath)
+ size = dpi_to_px(density)
+
+ im.thumbnail((size, size), Image.ANTIALIAS)
+ im.save(iconpath, "PNG")
+ empty_densities.remove(density)
+
+ # Then just copy from the highest resolution available
+ last_density = None
+ for density in reversed(densities):
+ if density not in empty_densities:
+ last_density = density
+ continue
+ if last_density is None:
+ continue
+ if options.verbose:
+ print "Density %s not available, copying from lower density %s" % (
+ density, last_density)
+
+ shutil.copyfile(
+ os.path.join(get_icon_dir(repodir, last_density), iconfilename),
+ os.path.join(get_icon_dir(repodir, density), iconfilename))
- resize_icon(iconpath)
+ empty_densities.remove(density)
+
+ for density in densities:
+ icon_dir = get_icon_dir(repodir, density)
+ icondest = os.path.join(icon_dir, iconfilename)
+ resize_icon(icondest, density)
+
+ # Copy from icons-mdpi to icons since mdpi is the baseline density
+ baseline = os.path.join(get_icon_dir(repodir, '160'), iconfilename)
+ if os.path.isfile(baseline):
+ shutil.copyfile(baseline,
+ os.path.join(get_icon_dir(repodir, None), iconfilename))
# Record in known apks, getting the added date at the same time..
added = knownapks.recordapk(thisinfo['apkname'], thisinfo['id'])
doc.appendChild(root)
repoel = doc.createElement("repo")
+
if archive:
repoel.setAttribute("name", config['archive_name'])
if config['repo_maxage'] != 0:
repoel.setAttribute("icon", os.path.basename(config['archive_icon']))
repoel.setAttribute("url", config['archive_url'])
addElement('description', config['archive_description'], doc, repoel)
+
else:
repoel.setAttribute("name", config['repo_name'])
if config['repo_maxage'] != 0:
repoel.setAttribute("icon", os.path.basename(config['repo_icon']))
repoel.setAttribute("url", config['repo_url'])
addElement('description', config['repo_description'], doc, repoel)
+
+ repoel.setAttribute("version", "11")
repoel.setAttribute("timestamp", str(int(time.time())))
- if config['repo_keyalias'] is not None:
+ if config['repo_keyalias']:
# Generate a certificate fingerprint the same way keytool does it
# (but with slightly different formatting)
addElement('lastupdated', time.strftime('%Y-%m-%d', app['lastupdated']), doc, apel)
addElement('name', app['Name'], doc, apel)
addElement('summary', app['Summary'], doc, apel)
- if app['icon'] is not None:
+ if app['icon']:
addElement('icon', app['icon'], doc, apel)
def linkres(link):
for app in apps:
if app['id'] == link:
return ("fdroid.app:" + link, app['Name'])
raise MetaDataException("Cannot resolve app id " + link)
- addElement('desc',
+ addElement('desc',
metadata.description_html(app['Description'], linkres), doc, apel)
addElement('license', app['License'], doc, apel)
if 'Categories' in app:
addElement('web', app['Web Site'], doc, apel)
addElement('source', app['Source Code'], doc, apel)
addElement('tracker', app['Issue Tracker'], doc, apel)
- if app['Donate'] is not None:
+ if app['Donate']:
addElement('donate', app['Donate'], doc, apel)
- if app['Bitcoin'] is not None:
+ if app['Bitcoin']:
addElement('bitcoin', app['Bitcoin'], doc, apel)
- if app['Litecoin'] is not None:
+ if app['Litecoin']:
addElement('litecoin', app['Litecoin'], doc, apel)
- if app['FlattrID'] is not None:
+ if app['Dogecoin']:
+ addElement('dogecoin', app['Dogecoin'], doc, apel)
+ if app['FlattrID']:
addElement('flattr', app['FlattrID'], doc, apel)
# These elements actually refer to the current version (i.e. which
# filtering can be removed in time...
if 'UpstreamNonFree' in af:
af.remove('UpstreamNonFree')
- addElement('antifeatures', ','.join(af), doc, apel)
+ if af:
+ addElement('antifeatures', ','.join(af), doc, apel)
+ if app['Provides']:
+ pv = app['Provides'].split(',')
+ addElement('provides', ','.join(pv), doc, apel)
if app['Requires Root']:
addElement('requirements', 'root', doc, apel)
if app['Requires Root']:
if 'ACCESS_SUPERUSER' not in apk['permissions']:
apk['permissions'].append('ACCESS_SUPERUSER')
-
+
if len(apk['permissions']) > 0:
addElement('permissions', ','.join(apk['permissions']), doc, apkel)
if 'nativecode' in apk and len(apk['nativecode']) > 0:
if not options.quiet:
print "Creating signed index."
print "Key fingerprint:", repo_pubkey_fingerprint
-
+
#Create a jar of the index...
p = subprocess.Popen(['jar', 'cf', 'index.jar', 'index.xml'],
cwd=repodir, stdout=subprocess.PIPE)
print output
# Copy the repo icon into the repo directory...
- icon_dir=os.path.join(repodir ,'icons')
+ icon_dir = os.path.join(repodir ,'icons')
iconfilename = os.path.join(icon_dir, os.path.basename(config['repo_icon']))
shutil.copyfile(config['repo_icon'], iconfilename)
-def archive_old_apks(apps, apks, repodir, archivedir, defaultkeepversions):
+def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversions):
for app in apps:
if 'srcname' in apk:
shutil.move(os.path.join(repodir, apk['srcname']),
os.path.join(archivedir, apk['srcname']))
+ archapks.append(apk)
apks.remove(apk)
print "Generated skeleton metadata for " + apk['id']
else:
print "WARNING: " + apk['apkname'] + " (" + apk['id'] + ") has no metadata"
- print " " + apk['name'] + " - " + apk['version']
+ print " " + apk['name'] + " - " + apk['version']
if len(repodirs) > 1:
- archive_old_apks(apps, apks, repodirs[0], repodirs[1], config['archive_older'])
+ archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1], config['archive_older'])
# Make the index for the main repo...
make_index(apps, apks, repodirs[0], False, categories)
from optparse import OptionParser
import common
-from common import BuildException
options = None
config = None
global options, config
# Parse command line...
- parser = OptionParser()
+ parser = OptionParser(usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
parser.add_option("-v", "--verbose", action="store_true", default=False,
help="Spew out even more information than normal")
- parser.add_option("-p", "--package", default=None,
- help="Verify only the specified package")
(options, args) = parser.parse_args()
config = common.read_config(options)
verified = 0
notverified = 0
+ vercodes = common.read_pkg_args(args, True)
+
for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):
apkfilename = os.path.basename(apkfile)
- i = apkfilename.rfind('_')
- if i == -1:
- raise BuildException("Invalid apk name")
- appid = apkfilename[:i]
-
- if not options.package or options.package == appid:
-
- try:
-
- print "Processing " + apkfilename
-
- remoteapk = os.path.join(tmp_dir, apkfilename)
- if os.path.exists(remoteapk):
- os.remove(remoteapk)
- url = 'https://f-droid.org/repo/' + apkfilename
- print "...retrieving " + url
- p = subprocess.Popen(['wget', url],
- cwd=tmp_dir,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out = p.communicate()[0]
- if p.returncode != 0:
- raise Exception("Failed to get " + apkfilename)
-
- thisdir = os.path.join(tmp_dir, 'this_apk')
- thatdir = os.path.join(tmp_dir, 'that_apk')
- for d in [thisdir, thatdir]:
- if os.path.exists(d):
- shutil.rmtree(d)
- os.mkdir(d)
-
- if subprocess.call(['jar', 'xf',
- os.path.join("..", "..", unsigned_dir, apkfilename)],
- cwd=thisdir) != 0:
- raise Exception("Failed to unpack local build of " + apkfilename)
- if subprocess.call(['jar', 'xf', os.path.join("..", "..", remoteapk)],
- cwd=thatdir) != 0:
- raise Exception("Failed to unpack remote build of " + apkfilename)
-
- p = subprocess.Popen(['diff', '-r', 'this_apk', 'that_apk'],
- cwd=tmp_dir, stdout=subprocess.PIPE)
- out = p.communicate()[0]
- lines = out.splitlines()
- if len(lines) != 1 or lines[0].find('META-INF') == -1:
- raise Exception("Unexpected diff output - " + out)
-
- print "...successfully verified"
- verified += 1
-
- except Exception, e:
- print "...NOT verified - {0}".format(e)
- notverified += 1
+ appid, vercode = common.apknameinfo(apkfile)
+
+        if vercodes:
+            if appid not in vercodes:
+                continue
+            if vercodes[appid] and vercode not in vercodes[appid]:
+                continue
+
+ try:
+
+ print "Processing " + apkfilename
+
+ remoteapk = os.path.join(tmp_dir, apkfilename)
+ if os.path.exists(remoteapk):
+ os.remove(remoteapk)
+ url = 'https://f-droid.org/repo/' + apkfilename
+ print "...retrieving " + url
+ p = subprocess.Popen(['wget', url],
+ cwd=tmp_dir,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out = p.communicate()[0]
+ if p.returncode != 0:
+ raise Exception("Failed to get " + apkfilename)
+
+ thisdir = os.path.join(tmp_dir, 'this_apk')
+ thatdir = os.path.join(tmp_dir, 'that_apk')
+ for d in [thisdir, thatdir]:
+ if os.path.exists(d):
+ shutil.rmtree(d)
+ os.mkdir(d)
+
+ if subprocess.call(['jar', 'xf',
+ os.path.join("..", "..", unsigned_dir, apkfilename)],
+ cwd=thisdir) != 0:
+ raise Exception("Failed to unpack local build of " + apkfilename)
+ if subprocess.call(['jar', 'xf', os.path.join("..", "..", remoteapk)],
+ cwd=thatdir) != 0:
+ raise Exception("Failed to unpack remote build of " + apkfilename)
+
+ p = subprocess.Popen(['diff', '-r', 'this_apk', 'that_apk'],
+ cwd=tmp_dir, stdout=subprocess.PIPE)
+ out = p.communicate()[0]
+ lines = out.splitlines()
+ if len(lines) != 1 or 'META-INF' not in lines[0]:
+ raise Exception("Unexpected diff output - " + out)
+
+ print "...successfully verified"
+ verified += 1
+
+ except Exception, e:
+ print "...NOT verified - {0}".format(e)
+ notverified += 1
print "\nFinished"
print "{0} successfully verified".format(verified)
+++ /dev/null
-# You will need to alter these before running makebuildserver.py
-
-# Name of the base box to use...
-basebox = "raring32"
-
-# Location where raring32.box can be found, if you don't already have
-# it. Could be set to https://f-droid.org/raring32.box if you like...
-baseboxurl = "/shares/software/OS and Boot/raring32.box"
-
-memory = 3584
-
-# Debian package proxy server - set this to None unless you have one...
-aptproxy = "http://192.168.0.19:8000"
-
-# Set to True if your base box is 64 bit...
-arch64 = False
import sys
import subprocess
import time
+import hashlib
from optparse import OptionParser
def vagrant(params, cwd=None, printout=False):
config = {}
execfile('makebs.config.py', config)
-if not os.path.exists('makebuildserver.py') or not os.path.exists(serverdir):
+if not os.path.exists('makebuildserver') or not os.path.exists(serverdir):
print 'This must be run from the correct directory!'
sys.exit(1)
('android-sdk_r22.3-linux.tgz',
'http://dl.google.com/android/android-sdk_r22.3-linux.tgz',
'4077575c98075480e0156c10e48a1521e31c7952768271a206870e6813057f4f'),
+ ('gradle-1.4-bin.zip',
+ 'http://services.gradle.org/distributions/gradle-1.4-bin.zip',
+ 'cd99e85fbcd0ae8b99e81c9992a2f10cceb7b5f009c3720ef3a0078f4f92e94e'),
+ ('gradle-1.6-bin.zip',
+ 'http://services.gradle.org/distributions/gradle-1.6-bin.zip',
+ 'de3e89d2113923dcc2e0def62d69be0947ceac910abd38b75ec333230183fac4'),
+ ('gradle-1.7-bin.zip',
+ 'http://services.gradle.org/distributions/gradle-1.7-bin.zip',
+ '360c97d51621b5a1ecf66748c718594e5f790ae4fbc1499543e0c006033c9d30'),
('gradle-1.8-bin.zip',
'http://services.gradle.org/distributions/gradle-1.8-bin.zip',
'a342bbfa15fd18e2482287da4959588f45a41b60910970a16e6d97959aea5703'),
+ ('gradle-1.9-bin.zip',
+ 'http://services.gradle.org/distributions/gradle-1.9-bin.zip',
+ '097ddc2bcbc9da2bb08cbf6bf8079585e35ad088bafd42e8716bc96405db98e9'),
('Kivy-1.7.2.tar.gz',
'http://pypi.python.org/packages/source/K/Kivy/Kivy-1.7.2.tar.gz',
'0485e2ef97b5086df886eb01f8303cb542183d2d71a159466f99ad6c8a1d03f1')
]
if config['arch64']:
cachefiles.extend([
- ('android-ndk-r9b-linux-x64_64.tar.bz2',
+ ('android-ndk-r9b-linux-x86_64.tar.bz2',
'http://dl.google.com/android/ndk/android-ndk-r9b-linux-x86_64.tar.bz2',
'8956e9efeea95f49425ded8bb697013b66e162b064b0f66b5c75628f76e0f532'),
('android-ndk-r9b-linux-x86_64-legacy-toolchains.tar.bz2',
     'http://dl.google.com/android/ndk/android-ndk-r9b-linux-x86_64-legacy-toolchains.tar.bz2',
'606aadf815ae28cc7b0154996247c70d609f111b14e44bcbcd6cad4c87fefb6f')])
wanted = []
+
+def sha256_for_file(path):
+    with open(path, 'rb') as f:
+ s = hashlib.sha256()
+ while True:
+ data = f.read(4096)
+ if not data:
+ break
+ s.update(data)
+ return s.hexdigest()
+
for f, src, shasum in cachefiles:
- if not os.path.exists(os.path.join(cachedir, f)):
+ relpath = os.path.join(cachedir, f)
+ if not os.path.exists(relpath):
print "Downloading " + f + " to cache"
if subprocess.call(['wget', src], cwd=cachedir) != 0:
print "...download of " + f + " failed."
sys.exit(1)
if shasum:
- p = subprocess.Popen(['shasum', '-a', '256', os.path.join(cachedir, f)],
- stdout=subprocess.PIPE)
- v = p.communicate()[0].split(' ')[0]
+ v = sha256_for_file(relpath)
if v != shasum:
print "Invalid shasum of '" + v + "' detected for " + f
sys.exit(1)
vagrantfile += """
config.vm.provision :chef_solo do |chef|
chef.cookbooks_path = "cookbooks"
- chef.log_level = :debug
+ chef.log_level = :debug
chef.json = {
:settings => {
:sdk_loc => "/home/vagrant/android-sdk",
chef.add_recipe "fdroidbuild-general"
chef.add_recipe "android-sdk"
chef.add_recipe "android-ndk"
+ chef.add_recipe "gradle"
chef.add_recipe "kivy"
end
end
print "Status: " + line
print "Packaging"
-vagrant(['package', '--output', os.path.join('..', boxfile)], serverdir)
+vagrant(['package', '--output', os.path.join('..', boxfile)], serverdir,
+ printout=options.verbose)
print "Adding box"
-vagrant(['box', 'add', 'buildserver', boxfile, '-f'])
+vagrant(['box', 'add', 'buildserver', boxfile, '-f'],
+ printout=options.verbose)
os.remove(boxfile)
+#!/usr/bin/env python2
+
# Copy this file to config.py, then amend the settings below according to
# your system configuration.
ndk_path = "$ANDROID_NDK"
# Build tools version to be used
-build_tools = "18.1.1"
+build_tools = "19.0.1"
# Command for running maven 3 (command or full path)
mvn3 = "mvn3"
# Command for running Gradle (command or full path)
gradle = "gradle"
-# Android gradle plugin version
-# "0.5.+" -> gradle 1.7
-# "0.6.+" -> gradle 1.8
-gradle_plugin = "0.6.+"
-
-# Max height and width (in pixels) for the icons in the repo
-# This corresponds to 72x72 pixels, i.e. mdpi
-icon_max_size = 72
-
# Set the maximum age (in days) of an index that a client should accept from
# this repo. Setting it to 0 or not setting it at all disables this
# functionality. If you do set this to a non-zero value, you need to ensure
repo_name = "F-Droid"
repo_icon = "fdroid-icon.png"
repo_description = """
-The official repository of the F-Droid client. Applications in this repository
+The official repository of the F-Droid client. Applications in this repository
are either official binaries built by the original application developers, or
are binaries built from source by the admin of f-droid.org using the tools on
https://gitorious.org/f-droid.
#Set this to true to always use a build server. This saves specifying the
#--server option on dedicated secure build server hosts.
build_server_always = False
+
+# Limit in number of characters that fields can take up
+# Only the fields listed here are supported, defaults shown
+char_limits = {
+ 'Summary' : 50,
+ 'Description' : 1500
+}
--- /dev/null
+#!/usr/bin/env python2
+#
+# You may want to alter these before running ./makebuildserver
+
+# Name of the base box to use
+basebox = "testing32"
+
+# Location where testing32.box can be found, if you don't already have
+# it. For security reasons, it's recommended that you make your own
+# in a secure environment using trusted media (see the manual) but
+# you can use this default if you like...
+baseboxurl = "https://f-droid.org/testing32.box"
+
+memory = 3584
+
+# Debian package proxy server - if you have one, e.g. "http://192.168.0.19:8000"
+aptproxy = None
+
+# Set to True if your base box is 64 bit (e.g. testing32.box isn't)
+arch64 = False
description='F-Droid Server Tools',
long_description=open('README').read(),
author='The F-Droid Project',
- author_email='admin@f-droid.org',
- url='http://f-droid.org',
+ author_email='team@f-droid.org',
+ url='https://f-droid.org',
packages=['fdroidserver'],
scripts=['fdroid'],
data_files=[
('share/doc/fdroidserver/examples',
- ['config.buildserver.py', 'config.sample.py', 'makebs.config.sample.py',
+ [ 'config.buildserver.py',
+ 'sampleconfigs/config.sample.py',
+ 'sampleconfigs/makebs.config.sample.py',
'fdroid-icon.png']),
],
install_requires=[
$this->android_strings_file_path = $android_strings_file_path_in;
$this->cache_file_path = $cache_file_path_in;
}
-
+
// Returns an associative array with android permissions and data about them
function get_permissions_array() {
if($query_vars['fdcategory'] == 'All categories') {
unset($query_vars['fdcategory']);
}
-
+
if($query_vars['fdid']!==null) {
$out.=$this->get_app($query_vars);
} else {
case "name":
$name=$el;
break;
+ case "added":
+ $added=$el;
+ break;
case "icon":
$icon=$el;
break;
foreach($apks as $apk) {
$first = $i+1==count($apks);
$out.="<p><b>Version ".$apk['version']."</b><br />";
+ $out.="Added on ".$apk['added']."<br />";
// Is this source or binary?
$srcbuild = isset($apk['srcname']) && file_exists($this->site_path.'/repo/'.$apk['srcname']);
$out.='<form name="categoryform" action="" method="get">';
$out.=$this->makeformdata($query_vars);
-
+
$out.='<select name="fdcategory" style="color:#333333;" onChange="document.categoryform.submit();">';
foreach($categories as $category) {
$out.='<option';
$out.='>'.$category.'</option>';
}
$out.='</select>';
-
+
$out.='</form>'."\n";
}
else {
extract($args);
echo $before_widget;
echo $before_title . 'Latest Apps' . $after_title;
-
+
$handle = fopen(getenv('DOCUMENT_ROOT').'/repo/latestapps.dat', 'r');
if ($handle) {
while (($buffer = fgets($handle, 4096)) !== false) {
}
fclose($handle);
}
-
+
echo $after_widget;
}