# setup.py will extend sys.path to include our support/lib/... directory
# itself. It will also create it in the beginning of the 'develop' command.
# PP/RUNPP (setup.py-based PYTHONPATH helpers) are obsolete and removed.
-PP=$(shell $(PYTHON) setup.py -q show_pythonpath)
-RUNPP=$(PYTHON) setup.py run_with_pythonpath
# TAHOE runs the locally-built bin/tahoe script under $(PYTHON).
TAHOE=$(PYTHON) bin/tahoe
# SOURCES lists every file/directory the lint-style targets below scan.
+SOURCES=src/allmydata src/buildtest static misc bin/tahoe-script.template twisted setup.py
.PHONY: make-version build
-# The 'darcsver' setup.py command comes in the 'darcsver' package:
-# http://pypi.python.org/pypi/darcsver It is necessary only if you want to
-# automatically produce a new _version.py file from the current darcs history.
+# This is necessary only if you want to automatically produce a new
+# _version.py file from the current git/darcs history.
make-version:
-	$(PYTHON) ./setup.py darcsver --count-all-patches
-
-# We want src/allmydata/_version.py to be up-to-date, but it's a fairly
-# expensive operation (about 6 seconds on a just-before-0.7.0 tree, probably
-# because of the 332 patches since the last tag), and we've removed the need
-# for an explicit 'build' step by removing the C code from src/allmydata and
-# by running everything in place. It would be neat to do:
-#
-#src/allmydata/_version.py: _darcs/patches
-#	$(MAKE) make-version
-#
-# since that would update the embedded version string each time new darcs
-# patches were pulled, but without an obligatory 'build' step this rule
-# wouldn't be run frequently enough anyways.
-#
-# So instead, I'll just make sure that we update the version at least once
-# when we first start using the tree, and again whenever an explicit
-# 'make-version' is run, since then at least the developer has some means to
-# update things. It would be nice if 'make clean' deleted any
-# automatically-generated _version.py too, so that 'make clean; make all'
-# could be useable as a "what the heck is going on, get me back to a clean
-# state', but we need 'make clean' to work on non-darcs trees without
-# destroying useful information.
+# 'update_version' is a setup.py command provided by this project; it
+# replaces the old external 'darcsver --count-all-patches' command.
+	$(PYTHON) ./setup.py update_version
# .built is a stamp file recording that 'make build' has completed at least once.
.built:
	$(MAKE) build
# Create the generated version module if it does not exist yet.
src/allmydata/_version.py:
	$(MAKE) make-version
-build: src/allmydata/_version.py
+# It is unnecessary to have this depend on build or src/allmydata/_version.py,
+# since 'setup.py build' always updates the version itself.
+build:
	$(PYTHON) setup.py build
	touch .built
# 'make install PREFIX=/usr/local/stow/tahoe-N.N' will do the same, but to
# a different location
-install: src/allmydata/_version.py
+install:
ifdef PREFIX
mkdir -p $(PREFIX)
$(PYTHON) ./setup.py install --single-version-externally-managed \
check: test
-test-coverage: build src/allmydata/_version.py
+# 'build' already refreshes the version file, so it is the only prerequisite.
+test-coverage: build
	rm -f .coverage
	$(TAHOE) debug trial --reporter=bwverbose-coverage $(TEST)
# Like the full test run but without coverage; assumes the tree is already built.
quicktest:
	$(TAHOE) debug trial $(TRIALARGS) $(TEST)
+# "make tmpfstest" may be a faster way of running tests on Linux. It works best when you have
+# at least 330 MiB of free physical memory (to run the whole test suite). Since it uses sudo
+# to mount/unmount the tmpfs filesystem, it might prompt for your password.
+tmpfstest:
+ time make _tmpfstest 'TMPDIR=$(shell mktemp -d --tmpdir=.)'
+
+_tmpfstest:
+ sudo mount -t tmpfs -o size=400m tmpfs '$(TMPDIR)'
+ -$(TAHOE) debug trial --rterrors '--temp-directory=$(TMPDIR)/_trial_temp' $(TRIALARGS) $(TEST)
+ sudo umount '$(TMPDIR)'
+ rmdir '$(TMPDIR)'
+
# code-coverage: install the "coverage" package from PyPI, do "make
# quicktest-coverage" to do a unit test run with coverage-gathering enabled,
# then use "make coverage-output-text" for a brief report, or "make
false
endif
+# Run the full battery of static checks. The '-'-prefixed prerequisites are the
+# non-fatal variants of those checks, so later checks still run after failures.
+code-checks: build version-and-path check-interfaces check-miscaptures -find-trailing-spaces -check-umids pyflakes
+
+version-and-path:
+	$(TAHOE) --version-and-path
+
+# 'tahoe @script' runs the script under the same Python and path as tahoe itself.
+check-interfaces:
+	$(TAHOE) @misc/coding_tools/check-interfaces.py 2>&1 |tee violations.txt
+	@echo
+
+check-miscaptures:
+	$(PYTHON) misc/coding_tools/check-miscaptures.py $(SOURCES) 2>&1 |tee miscaptures.txt
+	@echo
# Lint all of $(SOURCES) with pyflakes; -OO strips asserts/docstrings, -u unbuffers.
pyflakes:
-	$(PYTHON) -OOu `which pyflakes` src/allmydata static misc/build_helpers bin/tahoe-script.template twisted setup.py |sort |uniq
+	@$(PYTHON) -OOu `which pyflakes` $(SOURCES) |sort |uniq
+	@echo
+
# Fails the build on umid violations; 'old.py' files are excluded from the scan.
check-umids:
-	$(PYTHON) misc/coding_tools/check-umids.py `find src/allmydata -name '*.py'`
+	$(PYTHON) misc/coding_tools/check-umids.py `find $(SOURCES) -name '*.py' -not -name 'old.py'`
+	@echo
+
+# Non-fatal variant (recipe prefixed with '-'); used by the code-checks target.
+-check-umids:
+	-$(PYTHON) misc/coding_tools/check-umids.py `find $(SOURCES) -name '*.py' -not -name 'old.py'`
+	@echo
+
+
+doc-checks: check-rst
+
+# Render every .rst file with rst2html purely to surface syntax errors; the HTML
+# output is discarded, and a known-noisy docutils warning is filtered out.
+check-rst:
+	@for x in `find *.rst docs -name "*.rst"`; do rst2html -v $${x} >/dev/null; done 2>&1 |grep -v 'Duplicate implicit target name:'
+	@echo
# Rough code-size metrics over src/ (generated copies under /build/ excluded).
# 'wc -l' replaces GNU-only '--lines' for portability to BSD/macOS wc.
count-lines:
	@echo -n "files: "
-	@find src -name '*.py' |grep -v /build/ |wc --lines
+	@find src -name '*.py' |grep -v /build/ |wc -l
	@echo -n "lines: "
-	@cat `find src -name '*.py' |grep -v /build/` |wc --lines
+	@cat `find src -name '*.py' |grep -v /build/` |wc -l
	@echo -n "TODO: "
-	@grep TODO `find src -name '*.py' |grep -v /build/` | wc --lines
+	@grep TODO `find src -name '*.py' |grep -v /build/` | wc -l
+	@echo -n "XXX: "
+	@grep XXX `find src -name '*.py' |grep -v /build/` | wc -l
# Memory-usage benchmarks; each mode is run via 'tahoe @script', which replaces
# the removed RUNPP/run_with_pythonpath helper.
check-memory: .built
	rm -rf _test_memory
-	$(RUNPP) -p -c "src/allmydata/test/check_memory.py upload"
-	$(RUNPP) -p -c "src/allmydata/test/check_memory.py upload-self"
-	$(RUNPP) -p -c "src/allmydata/test/check_memory.py upload-POST"
-	$(RUNPP) -p -c "src/allmydata/test/check_memory.py download"
-	$(RUNPP) -p -c "src/allmydata/test/check_memory.py download-GET"
-	$(RUNPP) -p -c "src/allmydata/test/check_memory.py download-GET-slow"
-	$(RUNPP) -p -c "src/allmydata/test/check_memory.py receive"
+	$(TAHOE) @src/allmydata/test/check_memory.py upload
+	$(TAHOE) @src/allmydata/test/check_memory.py upload-self
+	$(TAHOE) @src/allmydata/test/check_memory.py upload-POST
+	$(TAHOE) @src/allmydata/test/check_memory.py download
+	$(TAHOE) @src/allmydata/test/check_memory.py download-GET
+	$(TAHOE) @src/allmydata/test/check_memory.py download-GET-slow
+	$(TAHOE) @src/allmydata/test/check_memory.py receive
# Single-mode variant; invoke as 'make check-memory-once MODE=<mode>'.
check-memory-once: .built
	rm -rf _test_memory
-	$(RUNPP) -p -c "src/allmydata/test/check_memory.py $(MODE)"
+	$(TAHOE) @src/allmydata/test/check_memory.py $(MODE)
# The check-speed target uses a pre-established client node to run a canned
# set of performance tests against a test network that is also
-$(TAHOE) stop $(TESTCLIENTDIR)
$(TAHOE) start $(TESTCLIENTDIR)
sleep 5
- $(PYTHON) src/allmydata/test/check_speed.py $(TESTCLIENTDIR)
+ $(TAHOE) @src/allmydata/test/check_speed.py $(TESTCLIENTDIR)
$(TAHOE) stop $(TESTCLIENTDIR)
# The check-grid target also uses a pre-established client node, along with a
# in src/allmydata/test/check_grid.py to see how to set this up.
check-grid: .built
# Fail fast if the required TESTCLIENTDIR variable was not supplied.
	if [ -z '$(TESTCLIENTDIR)' ]; then exit 1; fi
-	$(PYTHON) src/allmydata/test/check_grid.py $(TESTCLIENTDIR) bin/tahoe
+	$(TAHOE) @src/allmydata/test/check_grid.py $(TESTCLIENTDIR) bin/tahoe
bench-dirnode: .built
-	$(RUNPP) -p -c src/allmydata/test/bench_dirnode.py
+	$(TAHOE) @src/allmydata/test/bench_dirnode.py
+
+# the provisioning tool runs as a stand-alone webapp server
+run-provisioning-tool: .built
+	$(TAHOE) @misc/operations_helpers/provisioning/run.py
# 'make repl' is a simple-to-type command to get a Python interpreter loop
# from which you can type 'import allmydata'
$(MAKE)
$(PYTHON) misc/build_helpers/test-darcs-boringfile.py
+# Verify that a fresh build leaves no files that .gitignore fails to cover.
+test-git-ignore:
+	$(MAKE)
+	$(PYTHON) misc/build_helpers/test-git-ignore.py
+
# Build twice and diff the tree's file listing: catches files that the build
# creates outside the known generated set (_version.py, _appname.py, …).
test-clean:
	find . |grep -vEe "_darcs|allfiles.tmp|src/allmydata/_(version|appname).py" |sort >allfiles.tmp.old
	$(MAKE)
	find . |grep -vEe "_darcs|allfiles.tmp|src/allmydata/_(version|appname).py" |sort >allfiles.tmp.new
	diff allfiles.tmp.old allfiles.tmp.new
+# It would be nice if 'make clean' deleted any automatically-generated
+# _version.py too, so that 'make clean; make all' could be useable as a
+# "what the heck is going on, get me back to a clean state", but we need
+# 'make clean' to work on non-darcs trees without destroying useful information.
clean:
	rm -rf build _trial_temp _test_memory .built
	rm -f `find src *.egg -name '*.so' -or -name '*.pyc'`
	rm -f bin/tahoe bin/tahoe.pyscript
# Fails on trailing whitespace anywhere in $(SOURCES).
find-trailing-spaces:
-	$(PYTHON) misc/coding_tools/find-trailing-spaces.py -r src
+	$(PYTHON) misc/coding_tools/find-trailing-spaces.py -r $(SOURCES)
+	@echo
+
+# Non-fatal variant (recipe prefixed with '-'); used by the code-checks target.
+-find-trailing-spaces:
+	-$(PYTHON) misc/coding_tools/find-trailing-spaces.py -r $(SOURCES)
+	@echo
# The test-desert-island target grabs the tahoe-deps tarball, unpacks it,
# does a build, then asserts that the build did not try to download anything
# support/lib/ directory is gone.
# The download is skipped when tahoe-deps.tar.gz is already present locally.
fetch-and-unpack-deps:
-	test -f tahoe-deps.tar.gz || wget http://tahoe-lafs.org/source/tahoe/deps/tahoe-deps.tar.gz
+	test -f tahoe-deps.tar.gz || wget https://tahoe-lafs.org/source/tahoe/deps/tahoe-deps.tar.gz
	rm -rf tahoe-deps
	tar xzf tahoe-deps.tar.gz
$(PYTHON) setup.py sdist --sumo --formats=bztar,gztar,zip
# Upload release tarballs via flappclient, but only from the master branch (or
# when BB_BRANCH is unset). Uses POSIX '=' rather than bash-only '=='.
# Fixed: the skip message now names 'master' to match the branch actually tested.
upload-tarballs:
-	@if [ "X${BB_BRANCH}" == "Xtrunk" ] || [ "X${BB_BRANCH}" == "X" ]; then for f in dist/allmydata-tahoe-*; do flappclient --furlfile ~/.tahoe-tarball-upload.furl upload-file $$f; done ; else echo not uploading tarballs because this is not trunk but is branch \"${BB_BRANCH}\" ; fi
+	@if [ "X${BB_BRANCH}" = "Xmaster" ] || [ "X${BB_BRANCH}" = "X" ]; then for f in dist/allmydata-tahoe-*; do flappclient --furlfile ~/.tahoe-tarball-upload.furl upload-file $$f; done ; else echo not uploading tarballs because this is not master but is branch \"${BB_BRANCH}\" ; fi