Mirror of https://github.com/torproject/tor.git
Synced 2024-11-26 19:40:41 +00:00

Commit 27d4ba90f6 (parent 3cb6a690be):
Replace code and update README.md for current location of project.
.appveyor.yml (139 lines deleted)
@@ -1,139 +0,0 @@

version: 1.0.{build}

clone_depth: 50

# Appveyor images are named after the Visual Studio version they contain.
# But we compile using MinGW, not Visual Studio.
# We use these images because they have different Windows versions.
image:
  # Windows Server 2019
  - Visual Studio 2019

environment:
  compiler: mingw

  matrix:
    - target: i686-w64-mingw32
      compiler_path: mingw32
      mingw_prefix: mingw-w64-i686
      hardening: --enable-all-bugs-are-fatal
    - target: x86_64-w64-mingw32
      compiler_path: mingw64
      mingw_prefix: mingw-w64-x86_64
      # hardening doesn't work with mingw-w64-x86_64-gcc, because it's gcc 8
      hardening: --disable-gcc-hardening

matrix:
  # Don't keep building failing jobs
  fast_finish: true
  # Skip the 32-bit Windows Server 2019 job, and the 64-bit Windows Server
  # 2012 R2 job, to speed up the build.
  # The environment variables must be listed without the 'environment' tag.
  exclude:
    - image: Visual Studio 2019
      target: i686-w64-mingw32
      compiler_path: mingw32
      mingw_prefix: mingw-w64-i686
      hardening: --enable-all-bugs-are-fatal

install:
- ps: >-
    Function Execute-Command ($commandPath)
    {
        & $commandPath $args 2>&1
        if ( $LastExitCode -ne 0 ) {
            $host.SetShouldExit( $LastExitCode )
        }
    }

    Function Execute-Bash ()
    {
        Execute-Command 'c:\msys64\usr\bin\bash' '-e' '-c' $args
    }

    <# mingw packages start with ${env:mingw_prefix}
     # unprefixed packages are from MSYS2, which is like Cygwin. Avoid them.
     #
     # Use pacman --debug to show package downloads and install locations
     #
     # All installed library dlls must be copied to the test and app
     # directories, before running tor's tests. (See below.)
     #>

    Execute-Command "C:\msys64\usr\bin\pacman" -Syu --verbose --noconfirm pacman ;
- ps: >-
    Execute-Command "C:\msys64\usr\bin\pacman" -Sy --verbose --needed --noconfirm ${env:mingw_prefix}-libevent ${env:mingw_prefix}-openssl ${env:mingw_prefix}-pkg-config ${env:mingw_prefix}-xz ${env:mingw_prefix}-zstd ;

build_script:
- ps: >-
    if ($env:compiler -eq "mingw") {
        <# use the MSYS2 compiler and user binaries to build and install #>
        $oldpath = ${env:Path} -split ';'
        $buildpath = @("C:\msys64\${env:compiler_path}\bin", "C:\msys64\usr\bin") + $oldpath
        $env:Path = @($buildpath) -join ';'
        $env:build = @("${env:APPVEYOR_BUILD_FOLDER}", $env:target) -join '\'
        Set-Location "${env:APPVEYOR_BUILD_FOLDER}"
        Execute-Bash 'autoreconf -i'
        mkdir "${env:build}"
        Set-Location "${env:build}"
        Execute-Bash "which ${env:target}-gcc"
        Execute-Bash "${env:target}-gcc --version"
        <# compile for mingw
         # mingw zstd doesn't come with a pkg-config file, so we manually
         # configure its flags. liblzma just works.
         #>
        Execute-Bash "ZSTD_CFLAGS='-L/${env:compiler_path}/include' ZSTD_LIBS='-L/${env:compiler_path}/lib -lzstd' ../configure --prefix=/${env:compiler_path} --build=${env:target} --host=${env:target} --with-openssl-dir=/${env:compiler_path} --disable-asciidoc --enable-fatal-warnings ${env:hardening} CFLAGS='-D__USE_MINGW_ANSI_STDIO=0'"
        Execute-Bash "V=1 make -k -j2"
        Execute-Bash "V=1 make -k -j2 install"
    }

test_script:
- ps: >-
    if ($env:compiler -eq "mingw") {
        <# use the MSYS2 compiler binaries to make check #>
        $oldpath = ${env:Path} -split ';'
        $buildpath = @("C:\msys64\${env:compiler_path}\bin") + $oldpath
        $env:Path = $buildpath -join ';'
        Set-Location "${env:build}"
        <# Some compiler dlls must be copied to the test and app
         # directories, before running tor's tests.
         #>
        Copy-Item "C:/msys64/${env:compiler_path}/bin/libssp-0.dll","C:/msys64/${env:compiler_path}/bin/zlib1.dll" -Destination "${env:build}/src/test"
        Copy-Item "C:/msys64/${env:compiler_path}/bin/libssp-0.dll","C:/msys64/${env:compiler_path}/bin/zlib1.dll" -Destination "${env:build}/src/app"
        <# All installed library dlls must be copied to the test and app
         # directories, before running tor's tests.
         # (See install command above.)
         #>
        Copy-Item "C:/${env:compiler_path}/bin/libcrypto*.dll","C:/${env:compiler_path}/bin/libssl*.dll","C:/${env:compiler_path}/bin/liblzma*.dll","C:/${env:compiler_path}/bin/libevent*.dll","C:/${env:compiler_path}/bin/libzstd*.dll" -Destination "${env:build}/src/test"
        Copy-Item "C:/${env:compiler_path}/bin/libcrypto*.dll","C:/${env:compiler_path}/bin/libssl*.dll","C:/${env:compiler_path}/bin/liblzma*.dll","C:/${env:compiler_path}/bin/libevent*.dll","C:/${env:compiler_path}/bin/libzstd*.dll" -Destination "${env:build}/src/app"
        Execute-Bash "VERBOSE=1 TOR_SKIP_TESTCASES=crypto/openssl_version make -k -j2 check"
    }

on_finish:
- ps: >-
    <# if we failed before install:, these functions won't be defined #>
    Function Execute-Command ($commandPath)
    {
        & $commandPath $args 2>&1
        if ( $LastExitCode -ne 0 ) {
            $host.SetShouldExit( $LastExitCode )
        }
    }

    Function Execute-Bash ()
    {
        Execute-Command 'c:\msys64\usr\bin\bash' '-e' '-c' $args
    }

    if ($env:compiler -eq "mingw") {
        <# use the MSYS2 user binaries to archive failures #>
        $oldpath = ${env:Path} -split ';'
        $buildpath = @("C:\msys64\usr\bin") + $oldpath
        $env:Path = @($buildpath) -join ';'
        Set-Location "${env:build}"
        <# store logs as appveyor artifacts: see the artifacts tab #>
        Execute-Bash "7z a logs.zip config.log || true"
        Execute-Bash "7z a logs.zip test-suite.log || true"
        Execute-Bash "appveyor PushArtifact logs.zip || true"
        Execute-Bash "tail -1000 config.log || true"
        Execute-Bash "cat test-suite.log || true"
    }

# notify the IRC channel of any failures
on_failure:
- cmd: C:\Python27\python.exe %APPVEYOR_BUILD_FOLDER%\scripts\test\appveyor-irc-notify.py irc.oftc.net:6697 tor-ci failure
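The comments in this file stress that every library DLL installed by the MSYS2 packages has to be copied next to the test and app binaries before the tests run. As a hedged aside (not part of the original file), one way to double-check which DLLs a cross-built tor.exe actually needs, from an MSYS2 shell with binutils installed, is to read its PE import table:

    # Sketch only: assumes an MSYS2 shell, binutils installed, and a 64-bit
    # build tree at $build (as set up by build_script above).
    cd "$build/src/app"
    # List the DLL names tor.exe imports; anything that is not a Windows
    # system DLL must sit in this directory before the tests can run.
    objdump -p tor.exe | grep 'DLL Name:'
    # Copy a missing library from the mingw prefix, e.g. zstd:
    cp /mingw64/bin/libzstd*.dll .
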
.clang-format (163 lines deleted)
@@ -1,163 +0,0 @@

# DO NOT COMMIT OR MERGE CODE THAT IS RUN THROUGH THIS TOOL YET.
#
# WE ARE STILL DISCUSSING OUR DESIRED STYLE AND ITERATING ON IT.
# (12 Feb 2020)

---
Language: Cpp
# Out of all supported styles, LLVM seems closest to our own.
BasedOnStyle: LLVM

################
#
# Deviations from LLVM's style.
#
################

# We prefer an indentation width of 4 columns; LLVM likes 2.
## OVERRIDE FOR COMPARISON
IndentWidth: 2

## OVERRIDE FOR COMPARISON
## for now i'm not sorting includes, since that makes every file get touched.
SortIncludes: false

# We prefer 79; llvm likes 80.
ColumnLimit: 79

# Where do we want to put backslashes on multiline macros? Our choices are
# "as far left as possible", "as far right as possible", and "make no changes."
# LLVM defaults to right, but we don't dig that.
AlignEscapedNewlines: Left

# When we see a bunch of things in a row with comments after them, should we
# try to align those comments? Doing so makes some of our code pretty ugly.
AlignTrailingComments: false

# We use a function declaration style much closer to BSD KNF than to LLVM's.
# We say:
#    int foo(int x);
#    int
#    foo(int x)
#    {
#      ...
#    }
# whereas llvm prefers:
#    int foo(int x);
#    int foo(int x) {
#      ...
#    }
# or even:
#    int foo(int x) { ... }
#
BreakBeforeBraces: Custom
BraceWrapping:
  AfterFunction: true
AllowShortFunctionsOnASingleLine: None
AlwaysBreakAfterReturnType: AllDefinitions

# We don't like blocks to start with an empty line.
#
KeepEmptyLinesAtTheStartOfBlocks: false

################
#
# Tor-specific magic
#
################

#
# These comments are magical, and should not be changed.
#
CommentPragmas: 'LCOV_EXCL|COVERITY'

#
# Remove duplicate empty lines.
#
MaxEmptyLinesToKeep: 1

#
# Indent preprocessor directives, for clarity.
#
IndentPPDirectives: AfterHash

#
# These introduce an iteration, and work a bit like a for loop.
#
# Note that we can NOT include ones that don't work like "for". For example,
# if the body is an argument to the macro, we can't list it here.
#
ForEachMacros:
  - MAP_FOREACH
  - MAP_FOREACH_MODIFY
  - TOR_SIMPLEQ_FOREACH
  - TOR_SIMPLEQ_FOREACH_SAFE
  - TOR_SLIST_FOREACH
  - TOR_SLIST_FOREACH_SAFE
  - TOR_LIST_FOREACH
  - TOR_LIST_FOREACH_SAFE
  - TOR_TAILQ_FOREACH
  - TOR_TAILQ_FOREACH_SAFE
  - TOR_TAILQ_FOREACH_REVERSE
  - TOR_TAILQ_FOREACH_REVERSE_SAFE
  - TOR_CIRCLEQ_FOREACH
  - TOR_CIRCLEQ_FOREACH_SAFE
  - TOR_CIRCLEQ_FOREACH_REVERSE
  - TOR_CIRCLEQ_FOREACH_REVERSE_SAFE
  - HT_FOREACH
  - SMARTLIST_FOREACH_BEGIN
  - DIGESTMAP_FOREACH
  - DIGESTMAP_FOREACH_MODIFY
  - DIGEST256MAP_FOREACH
  - DIGEST256MAP_FOREACH_MODIFY
  - SDMAP_FOREACH
  - RIMAP_FOREACH
  - EIMAP_FOREACH

#
# Omitting:
#
# - SMARTLIST_FOREACH, since the body of the loop is an argument.

#
# This explains how to sort our headers.
#
# This is more complex than it truly should be, but I've edited this till
# compilation still mostly passes.
#
# I'm disabling this, however, since it's a distraction from the other
# formatting issues. See SortIncludes above.
#
IncludeCategories:
  - Regex: '^"orconfig.h'
    Priority: -30
  - Regex: '^"ext/'
    Priority: -18
  - Regex: '^"lib/'
    Priority: -10
  - Regex: '^"core/or/or.h'
    Priority: -5
  - Regex: '^"core/'
    Priority: 5
  - Regex: '^"feature/'
    Priority: 10
  - Regex: '^"app/'
    Priority: 20

#
# These macros should always cause indentation, as though they were { and }.
#
# Do NOT put macros here unless you want an extra level of indentation between
# them whenever they appear.
#
MacroBlockBegin: "^STMT_BEGIN|TT_STMT_BEGIN$"
MacroBlockEnd: "^STMT_END|TT_STMT_END$"

#
# These macros are interpreted as types.
# (Not supported in my clang-format)
#
# TypenameMacros:
#   - "STACK_OF"

...
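The header of this file warns that reformatted code must not be committed while the style is still being discussed. A hedged sketch (not part of the original file) of how one might preview what this configuration would do to a single file without touching the working tree, relying on clang-format's standard behaviour of picking up the nearest .clang-format when invoked with -style=file:

    # Sketch only: run from the repository root so -style=file finds this
    # .clang-format; the file name is just an example from the tor tree.
    clang-format -style=file src/core/or/channel.c | diff -u src/core/or/channel.c -
    # Exit status is nonzero when the tool would make changes; nothing is
    # rewritten in place because -i is deliberately omitted.
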
.editorconfig (33 lines deleted)
@@ -1,33 +0,0 @@

# this should work for all editors that support .editorconfig!
#
# on debian, emacs users should install elpa-editorconfig and vim
# users should install vim-editorconfig.

root = true

[*]
indent_style = space
# this removes the final newline in some editors, instead of inserting it
# insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf
charset = utf-8
max_line_length = 79

[*.py]
indent_size = 4
# ensure a final newline in python code
insert_final_newline = true

[*.c, *.h]
# some tests and other cases do not use 2-space indentation, so this
# should be commented out in those cases
indent_size = 2
trim_trailing_whitespace = true

[Makefile, *.am]
indent_style = tab

[*-spec.txt]
# specs seem to cap lines at 76 characters, like RFCs
max_line_length = 76
.gitignore (213 lines deleted)
@@ -1,213 +0,0 @@

# Editor droppings
\#*\#
.#*
*~
*.swp
*.swo
# C stuff
*.o
*.a
*.obj
# Diff droppings
*.orig
*.rej
# gcov stuff
*.gcno
*.gcov
*.gcda
# Autotools stuff
.deps
.dirstamp
*.trs
*.log
# Calltool stuff
.*.graph
# Stuff made by our makefiles
*.bak
# Python droppings
*.pyc
*.pyo
# Cscope
cscope.*
# OSX junk
*.dSYM
.DS_Store
# updateFallbackDirs.py temp files
details-*.json
uptime-*.json
*.full_url
*.last_modified
# Core files
core
core.*
!core/
# ccls file that can be per directory.
*.ccls

# /
/Makefile
/Makefile.in
/aclocal.m4
/ar-lib
/autom4te.cache
/build-stamp
/compile
/configure
/Doxyfile
/orconfig.h
/orconfig.h.in
/config.cache
/config.log
/config.status
/config.guess
/config.sub
/conftest*
/micro-revision.*
/patch-stamp
/stamp-h
/stamp-h.in
/stamp-h1
/tags
/TAGS
/test-driver
/tor.sh
/tor.spec
/depcomp
/install-sh
/missing
/mkinstalldirs
/Tor*Bundle.dmg
/tor-*-win32.exe
/warning_flags
/compile_commands.json

/coverage_html/
/callgraph/

# /contrib/
/contrib/dist/torctl
/contrib/dist/tor.service
/contrib/operator-tools/tor.logrotate

# /debian/
/debian/files
/debian/micro-revision.i
/debian/patched
/debian/tor
/debian/tor.postinst.debhelper
/debian/tor.postrm.debhelper
/debian/tor.prerm.debhelper
/debian/tor.substvars

# /doc/
/doc/Makefile
/doc/Makefile.in
/doc/doxygen
/doc/man/tor.1
/doc/man/tor.1.in
/doc/man/tor.html
/doc/man/tor.html.in
/doc/man/tor.1.xml
/doc/man/tor-gencert.1
/doc/man/tor-gencert.1.in
/doc/man/tor-gencert.html
/doc/man/tor-gencert.html.in
/doc/man/tor-gencert.1.xml
/doc/man/tor-resolve.1
/doc/man/tor-resolve.1.in
/doc/man/tor-resolve.html
/doc/man/tor-resolve.html.in
/doc/man/tor-resolve.1.xml
/doc/man/torify.1
/doc/man/torify.1.in
/doc/man/torify.html
/doc/man/torify.html.in
/doc/man/torify.1.xml
/doc/man/tor-print-ed-signing-cert.1
/doc/man/tor-print-ed-signing-cert.1.in
/doc/man/tor-print-ed-signing-cert.html
/doc/man/tor-print-ed-signing-cert.html.in
/doc/man/tor-print-ed-signing-cert.1.xml

# /doc/spec/
/doc/spec/Makefile
/doc/spec/Makefile.in

# /scripts
/scripts/maint/checkOptionDocs.pl
/scripts/maint/updateVersions.pl

# /src/
/src/Makefile
/src/Makefile.in

# /src/config/
/src/config/Makefile
/src/config/Makefile.in
/src/config/sample-server-torrc
/src/config/torrc
/src/config/torrc.sample
/src/config/torrc.minimal

# /src/ext/
/src/ext/ed25519/ref10/libed25519_ref10.lib
/src/ext/ed25519/donna/libed25519_donna.lib
/src/ext/keccak-tiny/libkeccak-tiny.lib

# /src/app
/src/app/tor
/src/app/tor.exe
/src/app/tor-cov
/src/app/tor-cov.exe

# /src/test
/src/test/Makefile
/src/test/Makefile.in
/src/test/bench
/src/test/bench.exe
/src/test/test
/src/test/test-slow
/src/test/test-bt-cl
/src/test/test-process
/src/test/test-memwipe
/src/test/test-ntor-cl
/src/test/test-hs-ntor-cl
/src/test/test-rng
/src/test/test-switch-id
/src/test/test-timers
/src/test/test_workqueue
/src/test/test.exe
/src/test/test-slow.exe
/src/test/test-bt-cl.exe
/src/test/test-process.exe
/src/test/test-ntor-cl.exe
/src/test/test-hs-ntor-cl.exe
/src/test/test-memwipe.exe
/src/test/test-rng.exe
/src/test/test-switch-id.exe
/src/test/test-timers.exe
/src/test/test_workqueue.exe

# /src/test/fuzz
/src/test/fuzz/fuzz-*
/src/test/fuzz/lf-fuzz-*

# /src/tools/
/src/tools/tor-checkkey
/src/tools/tor-resolve
/src/tools/tor-cov-resolve
/src/tools/tor-gencert
/src/tools/tor-print-ed-signing-cert
/src/tools/tor-print-ed-signing-cert.exe
/src/tools/tor-cov-gencert
/src/tools/tor-checkkey.exe
/src/tools/tor-resolve.exe
/src/tools/tor-cov-resolve.exe
/src/tools/tor-gencert.exe
/src/tools/tor-cov-gencert.exe
/src/tools/Makefile
/src/tools/Makefile.in

# /src/win32/
/src/win32/Makefile
/src/win32/Makefile.in
.gitlab-ci.yml (254 lines deleted)
@@ -1,254 +0,0 @@

####
# DO NOT EDIT THIS FILE IN MASTER. ONLY EDIT IT IN THE OLDEST SUPPORTED
# BRANCH, THEN MERGE FORWARD.
####

# This file controls how gitlab validates Tor commits and merge requests.
#
# It is primarily based on a set of scripts and configurations by
# Hans-Christoph Steiner. It only copies parts of those scripts and
# configurations for now. If you want a new piece of functionality
# (more debians, more fedoras, android support) then you shouldn't
# start from scratch: have a look at the original ticket, at
# https://gitlab.torproject.org/tpo/core/tor/-/issues/32193 !
#
# The file to copy from is
# https://gitlab.torproject.org/tpo/core/tor/-/merge_requests/96/diffs#diff-content-587d266bb27a4dc3022bbed44dfa19849df3044c
#
# Having said that, if there is anything really stupid here, don't
# blame it on Hans-Christoph! Tor probably added it on their own.
#
# Copyright 2020, The Tor Project, Inc.
# See LICENSE for licence information.

# These variables are set everywhere, unconditionally.
variables:
  TERM: "ansi"
  DEBUG_CI: "yes"

# This template is for exporting ephemeral things from the scripts. By
# convention we expect our scripts to copy stuff into artifacts/, rather than
# having a big list of files that should be treated as artifacts.
.artifacts-template: &artifacts-template
  artifacts:
    name: "${CI_PROJECT_PATH}_${CI_JOB_STAGE}_${CI_COMMIT_REF_NAME}_${CI_COMMIT_SHA}"
    expire_in: 1 week
    when: always
    paths:
      - artifacts/

# This template is used for x86-64 builds.
.x86-64-template: &x86-64-template
  tags:
    - amd64

# This template should be usable on any system that's based on apt.
.apt-template: &apt-template |
  export LC_ALL=C.UTF-8
  echo Etc/UTC > /etc/timezone
  mkdir -p apt-cache
  export APT_CACHE_DIR="$(pwd)/apt-cache"
  rm -f /etc/apt/apt.conf.d/docker-clean
  echo 'quiet "1";' \
       'APT::Install-Recommends "0";' \
       'APT::Install-Suggests "0";' \
       'APT::Acquire::Retries "20";' \
       'APT::Get::Assume-Yes "true";' \
       'Dpkg::Use-Pty "0";' \
       "Dir::Cache::Archives \"${APT_CACHE_DIR}\"; " \
       >> /etc/apt/apt.conf.d/99gitlab
  apt-get update -qq
  apt-get upgrade -qy

# This template sets us up for a Debian system in particular.
.debian-template: &debian-template
  <<: *artifacts-template
  <<: *x86-64-template
  variables:
    DEBIAN_FRONTEND: "noninteractive"
  # TODO: Using "cache" in this way speeds up our downloads. It would be
  # even better, though, to start with a pre-upgraded debian image.
  #
  # TODO: Will we have to do this differently once we have more than one
  # debian version that we're using?
  cache:
    key: apt
    paths:
      - apt-cache
  before_script:
    - *apt-template
    # Install packages unconditionally.
    - apt-get install
      apt-utils
      automake
      build-essential
      ca-certificates
      file
      git
      libevent-dev
      liblzma-dev
      libscrypt-dev
      libseccomp-dev
      libssl-dev
      pkg-config
      python3
      zlib1g-dev
    # Install packages that we only need for some use cases.
    - if [ "$ASCIIDOC" = yes ]; then apt-get install asciidoc xmlto; fi
    - if [ "$DOXYGEN" = yes ]; then apt-get install doxygen; fi
    - if [ "$STEM" = yes ]; then apt-get install timelimit; fi
    - if [ "$CC" = clang ]; then apt-get install clang; fi
    - if [ "$NSS" = yes ]; then apt-get install libnss3 libnss3-dev; fi
    # llvm-symbolizer for sanitizer backtrace
    - if [ "$HARDENING" = yes ]; then apt-get install llvm; fi
    # TODO: This next line should not be debian-only.
    - if [ "$STEM" = yes ]; then git clone --depth 1 https://gitlab.torproject.org/tpo/network-health/stem.git ; export STEM_PATH="$(pwd)/stem"; fi
    # TODO: This next line should not be debian-only.
    - if [ "$CHUTNEY" = yes ]; then git clone --depth 1 https://gitlab.torproject.org/tpo/core/chutney.git ; export CHUTNEY_PATH="$(pwd)/chutney"; fi
    - if [ "$TRACING" = yes ]; then apt install liblttng-ust-dev; fi

# Minimal check on debian: just make, make check.
#
debian-minimal:
  image: debian:bullseye
  <<: *debian-template
  script:
    - ./scripts/ci/ci-driver.sh

# Minimal check on debian/i386: just make, make check.
#
debian-i386-minimal:
  image: i386/debian:bullseye
  <<: *debian-template
  script:
    - ./scripts/ci/ci-driver.sh

#####
# Run "make check" with a hardened clang on debian stable. This takes
# care of a hardening check, and a compile-with-clang check.
#
# TODO: This will be faster once we merge #40098 and #40099.
debian-hardened:
  image: debian:bullseye
  <<: *debian-template
  variables:
    ALL_BUGS_ARE_FATAL: "yes"
    HARDENING: "yes"
    CC: "clang"
  script:
    - ./scripts/ci/ci-driver.sh

#####
# Distcheck on debian stable
debian-distcheck:
  image: debian:bullseye
  <<: *debian-template
  variables:
    DISTCHECK: "yes"
    CHECK: "no"
  script:
    - ./scripts/ci/ci-driver.sh

#####
# Documentation tests on debian stable: doxygen and asciidoc.
debian-docs:
  image: debian:bullseye
  <<: *debian-template
  variables:
    DOXYGEN: "yes"
    ASCIIDOC: "yes"
    CHECK: "no"
    RUN_STAGE_BUILD: "no"
  script:
    - ./scripts/ci/ci-driver.sh

#####
# Integration tests on debian stable: chutney and stem.
#
# TODO: It would be cool if this target didn't have to re-build tor, and
#       could instead re-use Tor from debian-minimal. That can be done
#       with the 'artifacts' mechanism, in theory, but it would be good to
#       avoid having to have a system with hundreds of artifacts.
debian-integration:
  image: debian:bullseye
  <<: *debian-template
  variables:
    CHECK: "no"
    CHUTNEY: "yes"
    CHUTNEY_MAKE_TARGET: "test-network-all"
    STEM: "yes"
    ALL_BUGS_ARE_FATAL: "yes"
  script:
    - ./scripts/ci/ci-driver.sh

#####
# Tracing build on Debian stable.
debian-tracing:
  image: debian:bullseye
  <<: *debian-template
  variables:
    TRACING: "yes"
    CHECK: "no"
    DISTCHECK: "yes"
  script:
    - ./scripts/ci/ci-driver.sh

#####
# No-authority mode
debian-disable-dirauth:
  image: debian:bullseye
  <<: *debian-template
  variables:
    DISABLE_DIRAUTH: "yes"
  script:
    - ./scripts/ci/ci-driver.sh

#####
# No-relay mode
debian-disable-relay:
  image: debian:bullseye
  <<: *debian-template
  variables:
    DISABLE_RELAY: "yes"
  script:
    - ./scripts/ci/ci-driver.sh

#####
# GPL licensed mode, enables pow module
debian-gpl:
  image: debian:buster
  <<: *debian-template
  variables:
    GPL: "yes"
  script:
    - ./scripts/ci/ci-driver.sh

#####
# NSS check on debian
debian-nss:
  image: debian:bullseye
  <<: *debian-template
  variables:
    NSS: "yes"
  script:
    - ./scripts/ci/ci-driver.sh

#####
# Debian packaging triggers for maintenance branches
debian-packaging-0.4.5:
  stage: deploy
  trigger:
    project: tpo/core/debian/tor
    branch: debian-0.4.5
  rules:
    - if: $CI_PROJECT_NAMESPACE == "tpo/core" &&
          $CI_COMMIT_BRANCH == "maint-0.4.5"

debian-packaging-0.4.6:
  stage: deploy
  trigger:
    project: tpo/core/debian/tor
    branch: debian-0.4.6
  rules:
    - if: $CI_PROJECT_NAMESPACE == "tpo/core" &&
          $CI_COMMIT_BRANCH == "maint-0.4.6"
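Every job above ends by running ./scripts/ci/ci-driver.sh, with the job-specific behaviour selected through variables such as HARDENING, DISTCHECK, or CHUTNEY. A hedged sketch (assuming, as the job definitions imply, that ci-driver.sh reads these variables from the environment) of reproducing the debian-hardened job in a local Debian container:

    # Sketch only: variable names come from the 'debian-hardened' job above,
    # and the package list from the template's before_script.
    apt-get update -qq && apt-get install -qy build-essential automake \
        libevent-dev libssl-dev zlib1g-dev pkg-config python3 clang llvm
    export DEBUG_CI=yes ALL_BUGS_ARE_FATAL=yes HARDENING=yes CC=clang
    ./scripts/ci/ci-driver.sh
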
(GitLab issue template: Bug, 32 lines deleted)
@@ -1,32 +0,0 @@

### Summary


### Steps to reproduce:

1. Step 1
2. ...

### What is the current bug behavior?


### What is the expected behavior?


### Environment

- Which version of Tor are you using? Run `tor --version` to get the version if you are unsure.
- Which operating system are you using? For example: Debian GNU/Linux 10.1, Windows 10, Ubuntu Xenial, FreeBSD 12.2, etc.
- Which installation method did you use? Distribution package (apt, pkg, homebrew), from source tarball, from Git, etc.

### Relevant logs and/or screenshots


### Possible fixes


/label ~Bug
(GitLab issue template: Feature, 9 lines deleted)
@@ -1,9 +0,0 @@

### Summary


### What is the expected behavior?


/label ~Feature
.travis.yml (238 lines deleted)
@@ -1,238 +0,0 @@

language: c

cache:
  ccache: true

compiler:
  - gcc

os:
  - linux

## We don't use the build matrix cross-product, because it makes too many jobs
## Instead, we list each job under matrix: include:
env:
  global:
    ## The Travis CI environment allows us two cores, so let's use both. Also,
    ## let's use the "-k" flag so that we get all of the compilation failures,
    ## not just the first one.
    - MAKEFLAGS="-k -j 2"
    ## We turn on hardening by default
    ## Also known as --enable-fragile-hardening in 0.3.0.3-alpha and later
    - HARDENING_OPTIONS="--enable-all-bugs-are-fatal --enable-expensive-hardening"
    ## We turn off asciidoc by default, because it's slow
    - ASCIIDOC_OPTIONS="--disable-asciidoc"
    ## Turn off tor's sandbox in chutney, until we fix sandbox errors that are
    ## triggered by Ubuntu Xenial and Bionic. See #32722.
    - CHUTNEY_TOR_SANDBOX="0"
    ## The default target for chutney jobs
    - CHUTNEY_MAKE="test-network-all"
  matrix:
    ## This matrix entry is required, but it doesn't actually create any jobs
    -

matrix:
  ## include creates builds with gcc, linux, unless we override those defaults
  include:
    ## We run chutney on macOS, because macOS Travis has IPv6
    ## But we only run the IPv6 chutney tests, to speed up the job
    - env: CHUTNEY_MAKE="test-network-ipv6" CHUTNEY="yes" CHUTNEY_ALLOW_FAILURES="2" SKIP_MAKE_CHECK="yes"
      os: osx

    ## We also run basic tests on macOS
    - compiler: clang
      os: osx
      ## Turn off some newer features, turn on clang's -Wtypedef-redefinition
      ## Also, disable ALL_BUGS_ARE_FATAL macro.
      env: C_DIALECT_OPTIONS="-std=gnu99" HARDENING_OPTIONS="--enable-expensive-hardening"

    ## We run chutney on Linux, because it's faster than chutney on macOS
    ## Chutney is a fast job, clang is slower on Linux, so we do Chutney clang
    - env: CHUTNEY="yes" CHUTNEY_ALLOW_FAILURES="2" SKIP_MAKE_CHECK="yes"
      compiler: clang

    ## We check asciidoc with distcheck, to make sure we remove doc products
    - env: DISTCHECK="yes" ASCIIDOC_OPTIONS="" SKIP_MAKE_CHECK="yes"

    ## We check disable module relay
    - env: MODULES_OPTIONS="--disable-module-relay" HARDENING_OPTIONS="--enable-expensive-hardening"
    ## We check disable module dirauth
    - env: MODULES_OPTIONS="--disable-module-dirauth" HARDENING_OPTIONS="--enable-expensive-hardening"

    ## We check NSS
    ## Use -std=gnu99 to turn off some newer features, and maybe turn on some
    ## extra gcc warnings?
    - env: NSS_OPTIONS="--enable-nss" C_DIALECT_OPTIONS="-std=gnu99" HARDENING_OPTIONS="--enable-expensive-hardening"

    ## We include a single coverage build with the best options for coverage
    - env: COVERAGE_OPTIONS="--enable-coverage" HARDENING_OPTIONS="" TOR_TEST_RNG_SEED="636f766572616765"

    ## We clone our stem repo and run `make test-stem`
    - env: TEST_STEM="yes" SKIP_MAKE_CHECK="yes"

    ## We run `make doxygen` without `make check`.
    - env: SKIP_MAKE_CHECK="yes" DOXYGEN="yes"

  ## Allow the build to report success (with non-required sub-builds
  ## continuing to run) if all required sub-builds have succeeded.
  fast_finish: true

  ## Careful! We use global envs, which makes it hard to allow failures by env:
  ## https://docs.travis-ci.com/user/customizing-the-build#matching-jobs-with-allow_failures
  allow_failures:
    ## Since we're actively developing IPv6, we want to require the IPv6
    ## chutney tests
    #- env: CHUTNEY_MAKE="test-network-ipv6" CHUTNEY="yes" CHUTNEY_ALLOW_FAILURES="2" SKIP_MAKE_CHECK="yes"
    #  os: osx

## (Linux only) Use a recent Linux image (Ubuntu Bionic)
dist: bionic

## Download our dependencies
addons:
  ## (Linux only)
  apt:
    packages:
      ## Required dependencies
      - libevent-dev
      ## Ubuntu comes with OpenSSL by default
      #- libssl-dev
      - zlib1g-dev
      ## Optional dependencies
      - libcap-dev
      - liblzma-dev
      - libnss3-dev
      - libscrypt-dev
      - libseccomp-dev
      - libzstd-dev
      ## Optional build dependencies
      - coccinelle
      - shellcheck
      ## Conditional build dependencies
      ## Always installed, so we don't need sudo
      - asciidoc
      - docbook-xsl
      - docbook-xml
      - xmlto
      - doxygen
      ## Utilities
      ## preventing or diagnosing hangs
      - timelimit
  ## (OSX only)
  homebrew:
    packages:
      ## Required dependencies
      - libevent
      ## The OSX version of OpenSSL is way too old
      - openssl
      ## OSX comes with zlib by default
      ## to use a newer zlib, pass the keg path to configure (like OpenSSL)
      #- zlib
      ## Optional dependencies
      - libscrypt
      - xz
      - zstd
      ## Required build dependencies
      ## Tor needs pkg-config to find some dependencies at build time
      - pkg-config
      ## Optional build dependencies
      - ccache
      - coccinelle
      - shellcheck
      ## Conditional build dependencies
      ## Always installed, because manual brew installs are hard to get right
      - asciidoc
      - xmlto
      ## Utilities
      ## preventing or diagnosing hangs
      - timelimit

## (OSX only) Use a recent macOS image
## See https://docs.travis-ci.com/user/reference/osx#os-x-version
## Default is Xcode 9.4 on macOS 10.13 as of October 2019
## Recent is Xcode 11.2 on macOS 10.14 as of October 2019
osx_image: xcode11.2

before_install:
  ## Set pipefail: we use pipes
  - set -o pipefail || echo "pipefail failed"

install:
  ## If we're on OSX, configure ccache (ccache is automatically installed and configured on Linux)
  - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then export PATH="/usr/local/opt/ccache/libexec:$PATH"; fi
  ## If we're on OSX, OpenSSL is keg-only, so tor 0.2.9 and later need to be configured --with-openssl-dir= to build
  - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then OPENSSL_OPTIONS=--with-openssl-dir=`brew --prefix openssl`; fi
  ## Install conditional features
  ## Install coveralls
  - if [[ "$COVERAGE_OPTIONS" != "" ]]; then pip install --user cpp-coveralls; fi
  ## If we're on OSX, and using asciidoc, configure asciidoc
  - if [[ "$ASCIIDOC_OPTIONS" == "" ]] && [[ "$TRAVIS_OS_NAME" == "osx" ]]; then export XML_CATALOG_FILES="/usr/local/etc/xml/catalog"; fi
  ## If we're running chutney, install it.
  - if [[ "$CHUTNEY" != "" ]]; then git clone --depth 1 https://github.com/torproject/chutney.git ; export CHUTNEY_PATH="$(pwd)/chutney"; fi
  ## If we're running stem, install it.
  - if [[ "$TEST_STEM" != "" ]]; then git clone --depth 1 https://github.com/torproject/stem.git ; export STEM_SOURCE_DIR=`pwd`/stem; fi
  ##
  ## Finally, list installed package versions
  - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then dpkg-query --show; fi
  - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew list --versions; fi
  ## Get python version
  - python --version
  ## If we're running chutney, show the chutney commit
  - if [[ "$CHUTNEY" != "" ]]; then pushd "$CHUTNEY_PATH"; git log -1 ; popd ; fi
  ## If we're running stem, show the stem version and commit
  - if [[ "$TEST_STEM" != "" ]]; then pushd stem; python -c "from stem import stem; print(stem.__version__);"; git log -1; popd; fi
  ## Get the coccinelle version
  ## Installs are unreliable on macOS, so we just rely on brew list --versions
  - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then spatch --version; fi
  ## We don't want Tor tests to depend on default configuration file at
  ## ~/.torrc. So we put some random bytes in there, to make sure we get build
  ## failures in case Tor is reading it during CI jobs.
  - dd ibs=1 count=1024 if=/dev/urandom > ~/.torrc

script:
  # Skip test_rebind and test_include on macOS
  - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then export TOR_SKIP_TEST_REBIND=true; export TOR_SKIP_TEST_INCLUDE=true; fi
  - ./autogen.sh
  - CONFIGURE_FLAGS="$ASCIIDOC_OPTIONS $COVERAGE_OPTIONS $HARDENING_OPTIONS $MODULES_OPTIONS $NSS_OPTIONS $OPENSSL_OPTIONS --enable-fatal-warnings --disable-silent-rules"
  - echo "Configure flags are $CONFIGURE_FLAGS CC=\"$CC $C_DIALECT_OPTIONS\""
  - ./configure $CONFIGURE_FLAGS CC="$CC $C_DIALECT_OPTIONS";
  ## We run `make check` because that's what https://jenkins.torproject.org does.
  - if [[ "$SKIP_MAKE_CHECK" == "" ]]; then make check; fi
  - if [[ "$DISTCHECK" != "" ]]; then make distcheck DISTCHECK_CONFIGURE_FLAGS="$CONFIGURE_FLAGS"; fi
  - if [[ "$CHUTNEY" != "" ]]; then make "$CHUTNEY_MAKE"; fi
  ## Diagnostic for bug 29437: kill stem if it hangs for 9.5 minutes
  ## Travis will kill the job after 10 minutes with no output
  - if [[ "$TEST_STEM" != "" ]]; then make src/app/tor; timelimit -p -t 540 -s USR1 -T 30 -S ABRT python3 "$STEM_SOURCE_DIR"/run_tests.py --tor src/app/tor --integ --test control.controller --test control.base_controller --test process --log TRACE --log-file stem.log; fi
  - if [[ "$DOXYGEN" != "" ]]; then make doxygen; fi
  ## If this build was one that produced coverage, upload it.
  - if [[ "$COVERAGE_OPTIONS" != "" ]]; then coveralls -b . --exclude src/test --exclude src/trunnel --gcov-options '\-p' || echo "Coverage failed"; fi

after_failure:
  ## configure will leave a log file with more details of config failures.
  ## But the log is too long for travis' rendered view, so tail it.
  - tail -1000 config.log || echo "tail failed"
  ## `make check` will leave a log file with more details of test failures.
  - if [[ "$SKIP_MAKE_CHECK" == "" ]]; then cat test-suite.log || echo "cat failed"; fi
  ## `make distcheck` puts it somewhere different.
  - if [[ "$DISTCHECK" != "" ]]; then make show-distdir-testlog || echo "make failed"; fi
  - if [[ "$DISTCHECK" != "" ]]; then make show-distdir-core || echo "make failed"; fi
  - if [[ "$CHUTNEY" != "" ]]; then "$CHUTNEY_PATH/tools/diagnostics.sh" || echo "diagnostics failed"; ls test_network_log || echo "ls failed"; cat test_network_log/* || echo "cat failed"; fi
  - if [[ "$TEST_STEM" != "" ]]; then tail -1000 "$STEM_SOURCE_DIR"/test/data/tor_log || echo "tail failed"; fi
  - if [[ "$TEST_STEM" != "" ]]; then grep -v "SocketClosed" stem.log | tail -1000 || echo "grep | tail failed"; fi

before_cache:
  ## Delete all gcov files.
  - if [[ "$COVERAGE_OPTIONS" != "" ]]; then make reset-gcov; fi

notifications:
  irc:
    channels:
      - "irc.oftc.net#tor-ci"
    template:
      - "%{repository} %{branch} %{commit} - %{author}: %{commit_subject}"
      - "Build #%{build_number} %{result}. Details: %{build_url}"
    on_success: change
    on_failure: change
  email:
    on_success: never
    on_failure: change
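The chutney entries in this Travis file clone https://github.com/torproject/chutney.git, export CHUTNEY_PATH, and then run the make target named by CHUTNEY_MAKE. A hedged sketch of the same flow outside Travis, assuming a tor checkout that has already been configured and built:

    # Sketch only: reproduces the chutney steps from the install: and script:
    # phases above, against an already-built tor tree.
    git clone --depth 1 https://github.com/torproject/chutney.git
    export CHUTNEY_PATH="$(pwd)/chutney"
    export CHUTNEY_TOR_SANDBOX=0   # sandbox disabled, per #32722 above
    make test-network-all
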
(Code of conduct statement, 7 lines deleted)
@@ -1,7 +0,0 @@

The Tor Project is committed to fostering an inclusive community
where people feel safe to engage, share their points of view, and
participate. For the latest version of our Code of Conduct, please
see

https://gitweb.torproject.org/community/policies.git/plain/code_of_conduct.txt
CONTRIBUTING (39 lines deleted)
@@ -1,39 +0,0 @@

Contributing to Tor
-------------------

### Getting started

Welcome!

We have a bunch of documentation about how to develop Tor in the
doc/HACKING/ directory. We recommend that you start with
doc/HACKING/README.1st.md , and then go from there. It will tell
you how to find your way around the source code, how to get
involved with the Tor community, how to write patches, and much
more!

You don't have to be a C developer to help with Tor: have a look
at https://www.torproject.org/getinvolved/volunteer !

The Tor Project is committed to fostering an inclusive community
where people feel safe to engage, share their points of view, and
participate. For the latest version of our Code of Conduct, please
see

https://gitweb.torproject.org/community/policies.git/plain/code_of_conduct.txt


### License issues

Tor is distributed under the license terms in the LICENSE -- in
brief, the "3-clause BSD license". If you send us code to
distribute with Tor, it needs to be code that we can distribute
under those terms. Please don't send us patches unless you agree
to allow this.

Some compatible licenses include:

  - 3-clause BSD
  - 2-clause BSD
  - CC0 Public Domain Dedication
Doxyfile.in (2511 lines deleted)
Diff suppressed because it is too large.
INSTALL (18 lines deleted)
@@ -1,18 +0,0 @@

Most users who realize that INSTALL files still exist should simply
follow the directions at
https://www.torproject.org/docs/tor-doc-unix

If you got the source from git, run "./autogen.sh", which will
run the various auto* programs. Then you can run ./configure, and
refer to the above instructions.

If it doesn't build for you:

  If you have problems finding libraries, try
    CPPFLAGS="-I/usr/local/include" LDFLAGS="-L/usr/local/lib" \
      ./configure
  or
    ./configure --with-libevent-dir=/usr/local
  rather than simply ./configure.
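A hedged, end-to-end sketch of the from-git build that the INSTALL text describes, using the non-default library paths it suggests (drop or adjust the flags if your libraries live in standard locations; the clone URL is the mirror named at the top of this page):

    # Sketch only: combines the steps named in INSTALL.
    git clone https://github.com/torproject/tor.git && cd tor
    ./autogen.sh
    CPPFLAGS="-I/usr/local/include" LDFLAGS="-L/usr/local/lib" \
        ./configure --with-libevent-dir=/usr/local
    make && make install
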
LICENSE (389 lines deleted)
@@ -1,389 +0,0 @@

This file contains the license for Tor,
a free software project to provide anonymity on the Internet.

It also lists the licenses for other components used by Tor.

For more information about Tor, see https://www.torproject.org/.

If you got this file as a part of a larger bundle,
there may be other license terms that you should be aware of.

===============================================================================
Tor is distributed under the "3-clause BSD" license, a commonly used
software license that means Tor is both free software and open source:

Copyright (c) 2001-2004, Roger Dingledine
Copyright (c) 2004-2006, Roger Dingledine, Nick Mathewson
Copyright (c) 2007-2019, The Tor Project, Inc.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

  * Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.

  * Redistributions in binary form must reproduce the above
    copyright notice, this list of conditions and the following disclaimer
    in the documentation and/or other materials provided with the
    distribution.

  * Neither the names of the copyright owners nor the names of its
    contributors may be used to endorse or promote products derived from
    this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
===============================================================================
src/ext/strlcat.c and src/ext/strlcpy.c by Todd C. Miller are licensed
under the following license:

 * Copyright (c) 1998 Todd C. Miller <Todd.Miller@courtesan.com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
 * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
 * THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
===============================================================================
src/ext/tor_queue.h is licensed under the following license:

 * Copyright (c) 1991, 1993
 *	The Regents of the University of California. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the University nor the names of its contributors
 *    may be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.

===============================================================================
src/ext/csiphash.c is licensed under the following license:

Copyright (c) 2013  Marek Majkowski <marek@popcount.org>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
===============================================================================
Trunnel is distributed under this license:

Copyright 2014  The Tor Project, Inc.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

  * Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.

  * Redistributions in binary form must reproduce the above
    copyright notice, this list of conditions and the following disclaimer
    in the documentation and/or other materials provided with the
    distribution.

  * Neither the names of the copyright owners nor the names of its
    contributors may be used to endorse or promote products derived from
    this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

===============================================================================
getdelim.c is distributed under this license:

Copyright (c) 2011 The NetBSD Foundation, Inc.
All rights reserved.

This code is derived from software contributed to The NetBSD Foundation
by Christos Zoulas.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

===============================================================================
src/config/geoip and src/config/geoip6:

These files are based on the IPFire Location Database. For more
information, see https://location.ipfire.org/.

The data is distributed under a creative commons "BY-SA 4.0" license.

Find the full license terms at:
https://creativecommons.org/licenses/by-sa/4.0/

===============================================================================
m4/pc_from_ucontext.m4 is available under the following license. Note that
it is *not* built into the Tor software.

Copyright (c) 2005, Google Inc.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

  * Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.
  * Redistributions in binary form must reproduce the above
    copyright notice, this list of conditions and the following disclaimer
    in the documentation and/or other materials provided with the
    distribution.
  * Neither the name of Google Inc. nor the names of its
    contributors may be used to endorse or promote products derived from
    this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

===============================================================================
m4/pkg.m4 is available under the following license. Note that
it is *not* built into the Tor software.

pkg.m4 - Macros to locate and utilise pkg-config. -*- Autoconf -*-
serial 1 (pkg-config-0.24)

Copyright © 2004 Scott James Remnant <scott@netsplit.com>.

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.

As a special exception to the GNU General Public License, if you
distribute this file as part of a program that contains a
configuration script generated by Autoconf, you may include it under
the same distribution terms that you use for the rest of that program.
===============================================================================
src/ext/readpassphrase.[ch] are distributed under this license:

Copyright (c) 2000-2002, 2007 Todd C. Miller <Todd.Miller@courtesan.com>

Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

Sponsored in part by the Defense Advanced Research Projects
Agency (DARPA) and Air Force Research Laboratory, Air Force
Materiel Command, USAF, under agreement number F39502-99-1-0512.

===============================================================================
src/ext/mulodi4.c is distributed under this license:

=========================================================================
compiler_rt License
=========================================================================

The compiler_rt library is dual licensed under both the
University of Illinois "BSD-Like" license and the MIT license.
As a user of this code you may choose to use it under either
license. As a contributor, you agree to allow your code to be
used under both.

Full text of the relevant licenses is included below.

=========================================================================

University of Illinois/NCSA
Open Source License

Copyright (c) 2009-2016 by the contributors listed in CREDITS.TXT

All rights reserved.

Developed by:

    LLVM Team

    University of Illinois at Urbana-Champaign

    http://llvm.org

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal with the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

  * Redistributions of source code must retain the above
    copyright notice, this list of conditions and the following
    disclaimers.

  * Redistributions in binary form must reproduce the above
    copyright notice, this list of conditions and the following
    disclaimers in the documentation and/or other materials
    provided with the distribution.

  * Neither the names of the LLVM Team, University of Illinois
    at Urbana-Champaign, nor the names of its contributors may
    be used to endorse or promote products derived from this
    Software without specific prior written permission.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS WITH THE SOFTWARE.

=========================================================================

Copyright (c) 2009-2015 by the contributors listed in CREDITS.TXT

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
=========================================================================
|
||||
Copyrights and Licenses for Third Party Software Distributed with LLVM:
|
||||
=========================================================================
|
||||
|
||||
The LLVM software contains code written by third parties. Such
|
||||
software will have its own individual LICENSE.TXT file in the
|
||||
directory in which it appears. This file will describe the
|
||||
copyrights, license, and restrictions which apply to that code.
|
||||
|
||||
The disclaimer of warranty in the University of Illinois Open
|
||||
Source License applies to all code in the LLVM Distribution, and
|
||||
nothing in any of the other licenses gives permission to use the
|
||||
names of the LLVM Team or the University of Illinois to endorse
|
||||
or promote products derived from this Software.
|
||||
|
||||
===============================================================================
|
||||
If you got Tor as a static binary with OpenSSL included, then you should know:
|
||||
"This product includes software developed by the OpenSSL Project
|
||||
for use in the OpenSSL Toolkit (http://www.openssl.org/)"
|
||||
===============================================================================
|
Makefile.am
@@ -1,690 +0,0 @@
|
||||
# Copyright (c) 2001-2004, Roger Dingledine
|
||||
# Copyright (c) 2004-2006, Roger Dingledine, Nick Mathewson
|
||||
# Copyright (c) 2007-2019, The Tor Project, Inc.
|
||||
# See LICENSE for licensing information
|
||||
|
||||
ACLOCAL_AMFLAGS = -I m4
|
||||
|
||||
noinst_LIBRARIES=
|
||||
EXTRA_DIST=
|
||||
noinst_HEADERS=
|
||||
bin_PROGRAMS=
|
||||
EXTRA_PROGRAMS=
|
||||
CLEANFILES=
|
||||
TESTS=
|
||||
noinst_PROGRAMS=
|
||||
DISTCLEANFILES=
|
||||
MOSTLYCLEANFILES=
|
||||
bin_SCRIPTS=
|
||||
AM_CPPFLAGS=\
|
||||
-I$(top_srcdir)/src \
|
||||
-I$(top_srcdir)/src/ext \
|
||||
-I$(top_srcdir)/src/ext/trunnel \
|
||||
-I$(top_srcdir)/src/trunnel
|
||||
|
||||
AM_CFLAGS= \
|
||||
@TOR_SYSTEMD_CFLAGS@ \
|
||||
@CFLAGS_BUGTRAP@ \
|
||||
@TOR_LZMA_CFLAGS@ \
|
||||
@TOR_ZSTD_CFLAGS@
|
||||
|
||||
SHELL=@SHELL@
|
||||
|
||||
if COVERAGE_ENABLED
|
||||
TESTING_TOR_BINARY=$(top_builddir)/src/app/tor-cov$(EXEEXT)
|
||||
else
|
||||
TESTING_TOR_BINARY=$(top_builddir)/src/app/tor$(EXEEXT)
|
||||
endif
|
||||
|
||||
# "Common" libraries used to link tor's utility code.
|
||||
TOR_UTIL_LIBS = \
|
||||
src/lib/libtor-geoip.a \
|
||||
src/lib/libtor-process.a \
|
||||
src/lib/libtor-buf.a \
|
||||
src/lib/libtor-confmgt.a \
|
||||
src/lib/libtor-pubsub.a \
|
||||
src/lib/libtor-dispatch.a \
|
||||
src/lib/libtor-time.a \
|
||||
src/lib/libtor-fs.a \
|
||||
src/lib/libtor-encoding.a \
|
||||
src/lib/libtor-sandbox.a \
|
||||
src/lib/libtor-container.a \
|
||||
src/lib/libtor-net.a \
|
||||
src/lib/libtor-thread.a \
|
||||
src/lib/libtor-memarea.a \
|
||||
src/lib/libtor-math.a \
|
||||
src/lib/libtor-meminfo.a \
|
||||
src/lib/libtor-osinfo.a \
|
||||
src/lib/libtor-log.a \
|
||||
src/lib/libtor-lock.a \
|
||||
src/lib/libtor-fdio.a \
|
||||
src/lib/libtor-string.a \
|
||||
src/lib/libtor-term.a \
|
||||
src/lib/libtor-smartlist-core.a \
|
||||
src/lib/libtor-malloc.a \
|
||||
src/lib/libtor-wallclock.a \
|
||||
src/lib/libtor-err.a \
|
||||
src/lib/libtor-version.a \
|
||||
src/lib/libtor-llharden.a \
|
||||
src/lib/libtor-intmath.a \
|
||||
src/lib/libtor-ctime.a \
|
||||
src/lib/libtor-metrics.a
|
||||
|
||||
# Variants of the above for linking the testing variant of tor (for coverage
|
||||
# and tests)
|
||||
if UNITTESTS_ENABLED
|
||||
TOR_UTIL_TESTING_LIBS = \
|
||||
src/lib/libtor-geoip-testing.a \
|
||||
src/lib/libtor-process-testing.a \
|
||||
src/lib/libtor-buf-testing.a \
|
||||
src/lib/libtor-confmgt-testing.a \
|
||||
src/lib/libtor-pubsub-testing.a \
|
||||
src/lib/libtor-dispatch-testing.a \
|
||||
src/lib/libtor-time-testing.a \
|
||||
src/lib/libtor-fs-testing.a \
|
||||
src/lib/libtor-encoding-testing.a \
|
||||
src/lib/libtor-sandbox-testing.a \
|
||||
src/lib/libtor-container-testing.a \
|
||||
src/lib/libtor-net-testing.a \
|
||||
src/lib/libtor-thread-testing.a \
|
||||
src/lib/libtor-memarea-testing.a \
|
||||
src/lib/libtor-math-testing.a \
|
||||
src/lib/libtor-meminfo-testing.a \
|
||||
src/lib/libtor-osinfo-testing.a \
|
||||
src/lib/libtor-term-testing.a \
|
||||
src/lib/libtor-log-testing.a \
|
||||
src/lib/libtor-lock-testing.a \
|
||||
src/lib/libtor-fdio-testing.a \
|
||||
src/lib/libtor-string-testing.a \
|
||||
src/lib/libtor-smartlist-core-testing.a \
|
||||
src/lib/libtor-malloc-testing.a \
|
||||
src/lib/libtor-wallclock-testing.a \
|
||||
src/lib/libtor-err-testing.a \
|
||||
src/lib/libtor-version-testing.a \
|
||||
src/lib/libtor-llharden-testing.a \
|
||||
src/lib/libtor-intmath.a \
|
||||
src/lib/libtor-ctime-testing.a \
|
||||
src/lib/libtor-metrics-testing.a
|
||||
endif
|
||||
|
||||
# Internal crypto libraries used in Tor
|
||||
TOR_CRYPTO_LIBS = \
|
||||
src/lib/libtor-tls.a \
|
||||
src/lib/libtor-crypt-ops.a \
|
||||
$(LIBKECCAK_TINY) \
|
||||
$(LIBDONNA)
|
||||
|
||||
if BUILD_MODULE_POW
|
||||
TOR_CRYPTO_LIBS += $(EQUIX_LIBS)
|
||||
endif
|
||||
|
||||
# Variants of the above for linking the testing variant of tor (for coverage
|
||||
# and tests)
|
||||
if UNITTESTS_ENABLED
|
||||
TOR_CRYPTO_TESTING_LIBS = \
|
||||
src/lib/libtor-tls-testing.a \
|
||||
src/lib/libtor-crypt-ops-testing.a \
|
||||
$(LIBKECCAK_TINY) \
|
||||
$(LIBDONNA) \
|
||||
$(EQUIX_LIBS)
|
||||
endif
|
||||
|
||||
# All static libraries used to link tor.
|
||||
TOR_INTERNAL_LIBS = \
|
||||
src/core/libtor-app.a \
|
||||
src/lib/libtor-compress.a \
|
||||
src/lib/libtor-evloop.a \
|
||||
$(TOR_CRYPTO_LIBS) \
|
||||
$(TOR_UTIL_LIBS) \
|
||||
src/trunnel/libor-trunnel.a \
|
||||
src/lib/libtor-trace.a
|
||||
|
||||
libtor.a: $(TOR_INTERNAL_LIBS)
|
||||
$(AM_V_AR) export AR="$(AR)"; \
|
||||
export ARFLAGS="$(ARFLAGS)"; \
|
||||
export RANLIB="$(RANLIB)"; \
|
||||
$(top_srcdir)/scripts/build/combine_libs libtor.a $(TOR_INTERNAL_LIBS)
|
||||
|
||||
MOSTLYCLEANFILES += libtor.a
|
||||
|
||||
# Variants of the above for linking the testing variant of tor (for coverage
|
||||
# and tests)
|
||||
if UNITTESTS_ENABLED
|
||||
TOR_INTERNAL_TESTING_LIBS = \
|
||||
src/core/libtor-app-testing.a \
|
||||
src/lib/libtor-compress-testing.a \
|
||||
src/lib/libtor-evloop-testing.a \
|
||||
$(TOR_CRYPTO_TESTING_LIBS) \
|
||||
$(TOR_UTIL_TESTING_LIBS) \
|
||||
src/trunnel/libor-trunnel-testing.a \
|
||||
src/lib/libtor-trace.a
|
||||
|
||||
src/test/libtor-testing.a: $(TOR_INTERNAL_TESTING_LIBS)
|
||||
$(AM_V_AR) export AR="$(AR)"; \
|
||||
export ARFLAGS="$(ARFLAGS)"; \
|
||||
export RANLIB="$(RANLIB)"; \
|
||||
$(top_srcdir)/scripts/build/combine_libs src/test/libtor-testing.a $(TOR_INTERNAL_TESTING_LIBS)
|
||||
|
||||
MOSTLYCLEANFILES += src/test/libtor-testing.a
|
||||
endif
|
||||
|
||||
TOR_LDFLAGS_CRYPTLIB=@TOR_LDFLAGS_openssl@
|
||||
TOR_LIBS_CRYPTLIB=@TOR_OPENSSL_LIBS@
|
||||
TOR_CFLAGS_CRYPTLIB=
|
||||
if USE_NSS
|
||||
TOR_CFLAGS_CRYPTLIB+=@NSS_CFLAGS@
|
||||
TOR_LIBS_CRYPTLIB+=@NSS_LIBS@
|
||||
endif
|
||||
|
||||
# All libraries used to link tor-cov
|
||||
|
||||
include src/include.am
|
||||
include doc/include.am
|
||||
include contrib/include.am
|
||||
|
||||
manpages: $(nodist_man1_MANS)
|
||||
|
||||
EXTRA_DIST+= \
|
||||
ChangeLog \
|
||||
CONTRIBUTING \
|
||||
CODE_OF_CONDUCT \
|
||||
INSTALL \
|
||||
LICENSE \
|
||||
README.md \
|
||||
ReleaseNotes \
|
||||
scripts/build/combine_libs \
|
||||
scripts/maint/checkIncludes.py \
|
||||
scripts/maint/checkSpace.pl \
|
||||
scripts/maint/checkSpaceTest.sh \
|
||||
scripts/maint/checkspace_tests/dubious.c \
|
||||
scripts/maint/checkspace_tests/dubious.h \
|
||||
scripts/maint/checkspace_tests/expected.txt \
|
||||
scripts/maint/checkspace_tests/good_guard.h \
|
||||
scripts/maint/checkspace_tests/same_guard.h \
|
||||
scripts/maint/checkspace_tests/subdir/dubious.c \
|
||||
scripts/maint/checkShellScripts.sh \
|
||||
scripts/maint/practracker/README \
|
||||
scripts/maint/practracker/exceptions.txt \
|
||||
scripts/maint/practracker/includes.py \
|
||||
scripts/maint/practracker/metrics.py \
|
||||
scripts/maint/practracker/practracker.py \
|
||||
scripts/maint/practracker/practracker_tests.py \
|
||||
scripts/maint/practracker/problem.py \
|
||||
scripts/maint/practracker/testdata/.may_include \
|
||||
scripts/maint/practracker/testdata/a.c \
|
||||
scripts/maint/practracker/testdata/b.c \
|
||||
scripts/maint/practracker/testdata/ex0-expected.txt \
|
||||
scripts/maint/practracker/testdata/ex0.txt \
|
||||
scripts/maint/practracker/testdata/ex1-expected.txt \
|
||||
scripts/maint/practracker/testdata/ex1.txt \
|
||||
scripts/maint/practracker/testdata/ex1-overbroad-expected.txt \
|
||||
scripts/maint/practracker/testdata/ex1-regen-expected.txt \
|
||||
scripts/maint/practracker/testdata/ex1-regen-overbroad-expected.txt \
|
||||
scripts/maint/practracker/testdata/ex.txt \
|
||||
scripts/maint/practracker/testdata/header.h \
|
||||
scripts/maint/practracker/testdata/not_c_file \
|
||||
scripts/maint/practracker/test_practracker.sh \
|
||||
scripts/maint/practracker/util.py \
|
||||
scripts/coccinelle/apply.sh \
|
||||
scripts/coccinelle/check_cocci_parse.sh \
|
||||
scripts/coccinelle/exceptions.txt \
|
||||
scripts/coccinelle/test-operator-cleanup \
|
||||
scripts/coccinelle/tor-coccinelle.h \
|
||||
scripts/coccinelle/try_parse.sh
|
||||
|
||||
## This tells etags how to find mockable function definitions.
|
||||
AM_ETAGSFLAGS=--regex='{c}/MOCK_IMPL([^,]+,\W*\([a-zA-Z0-9_]+\)\W*,/\1/s'
|
||||
|
||||
if COVERAGE_ENABLED
|
||||
TEST_CFLAGS=-fno-inline -fprofile-arcs -ftest-coverage
|
||||
if DISABLE_ASSERTS_IN_UNIT_TESTS
|
||||
TEST_CPPFLAGS=-DTOR_UNIT_TESTS -DTOR_COVERAGE -DDISABLE_ASSERTS_IN_UNIT_TESTS @TOR_MODULES_ALL_ENABLED@
|
||||
else
|
||||
TEST_CPPFLAGS=-DTOR_UNIT_TESTS -DTOR_COVERAGE @TOR_MODULES_ALL_ENABLED@
|
||||
endif
|
||||
TEST_NETWORK_FLAGS=--coverage --hs-multi-client 1
|
||||
else
|
||||
TEST_CFLAGS=
|
||||
TEST_CPPFLAGS=-DTOR_UNIT_TESTS @TOR_MODULES_ALL_ENABLED@
|
||||
TEST_NETWORK_FLAGS=--hs-multi-client 1
|
||||
endif
|
||||
TEST_NETWORK_SHOW_WARNINGS_FOR_LAST_RUN_FLAGS=--quiet --only-warnings
|
||||
|
||||
if LIBFUZZER_ENABLED
|
||||
TEST_CFLAGS += -fsanitize=fuzzer-no-link
|
||||
# not "edge"
|
||||
endif
|
||||
|
||||
TEST_NETWORK_ALL_LOG_DIR=$(top_builddir)/test_network_log
|
||||
TEST_NETWORK_ALL_DRIVER_FLAGS=--color-tests yes
|
||||
|
||||
#install-data-local:
|
||||
# $(INSTALL) -m 755 -d $(LOCALSTATEDIR)/lib/tor
|
||||
|
||||
# Allows overriding rpmbuild with rpmbuild-md5 from fedora-packager so that
# building for EL5 won't fail on https://bugzilla.redhat.com/show_bug.cgi?id=490613
|
||||
RPMBUILD ?= rpmbuild
|
||||
|
||||
# Use automake's dist-gzip target to build the tarball
|
||||
dist-rpm: dist-gzip
|
||||
TIMESTAMP=$$(date +"%Y-%m-%d_%H.%M.%S"); \
|
||||
RPM_BUILD_DIR=$$(mktemp -d "/tmp/tor-rpm-build-$$TIMESTAMP-XXXX"); \
|
||||
mkdir -p "$$RPM_BUILD_DIR"/{BUILD,RPMS,SOURCES/"tor-$(VERSION)",SPECS,SRPMS}; \
|
||||
cp -fa "$(distdir).tar.gz" "$$RPM_BUILD_DIR"/SOURCES/; \
|
||||
LIBS=-lrt $(RPMBUILD) -ba --define "_topdir $$RPM_BUILD_DIR" tor.spec; \
|
||||
cp -fa "$$RPM_BUILD_DIR"/SRPMS/* .; \
|
||||
cp -fa "$$RPM_BUILD_DIR"/RPMS/* .; \
|
||||
rm -rf "$$RPM_BUILD_DIR"; \
|
||||
echo "RPM build finished"; \
|
||||
#end of dist-rpm
|
||||
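# Example invocation (a sketch, not part of the original Makefile): the
# RPMBUILD variable above can be overridden on the make command line, e.g. to
# use the md5-compatible wrapper from fedora-packager mentioned above:
#   make dist-rpm RPMBUILD=rpmbuild-md5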
|
||||
.PHONY: doxygen
|
||||
doxygen: Doxyfile
|
||||
mkdir -p doc/doxygen
|
||||
(cd "$(top_srcdir)" && doxygen "$(abs_top_builddir)/Doxyfile")
|
||||
|
||||
test: all
|
||||
$(top_builddir)/src/test/test
|
||||
|
||||
shellcheck:
|
||||
$(top_srcdir)/scripts/maint/checkShellScripts.sh
|
||||
|
||||
check-local: \
|
||||
check-spaces \
|
||||
check-changes \
|
||||
check-includes \
|
||||
shellcheck
|
||||
|
||||
# test-network requires a copy of Chutney in $CHUTNEY_PATH.
|
||||
# Chutney can be cloned from https://gitlab.torproject.org/tpo/core/chutney.git .
|
||||
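# Example workflow (a sketch, not part of the original Makefile; it follows
# the instructions printed by the need-chutney-path rule below):
#   git clone https://gitlab.torproject.org/tpo/core/chutney.git
#   export CHUTNEY_PATH="`pwd`/chutney"
#   make test-network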
.PHONY: need-chutney-path
|
||||
need-chutney-path:
|
||||
@if test ! -d "$$CHUTNEY_PATH"; then \
|
||||
echo '$$CHUTNEY_PATH was not set.'; \
|
||||
if test -d "$(top_srcdir)/../chutney" && \
|
||||
test -x "$(top_srcdir)/../chutney/chutney"; then \
|
||||
echo "Assuming test-network.sh will find" \
|
||||
"$(top_srcdir)/../chutney"; \
|
||||
else \
|
||||
echo; \
|
||||
echo "To run these tests," \
|
||||
"git clone" \
|
||||
"https://gitlab.torproject.org/tpo/core/chutney.git ;" \
|
||||
"export CHUTNEY_PATH=\`pwd\`/chutney"; \
|
||||
exit 1; \
|
||||
fi \
|
||||
fi
|
||||
|
||||
# Run some basic tests using automake's test-driver
|
||||
.PHONY: test-network
|
||||
# Hide directory path logs from submakes using $(MAKE) -s
|
||||
test-network:
|
||||
@$(MAKE) -s test-network-mkdir
|
||||
@$(MAKE) -s test-network-clean
|
||||
@$(MAKE) -s test-network-run \
|
||||
ipv4_flavors="$(TEST_CHUTNEY_FLAVOR_QUICK)" \
|
||||
ipv6_flavors="$(TEST_CHUTNEY_FLAVOR_QUICK_IPV6)"
|
||||
@$(MAKE) -s test-network-results
|
||||
|
||||
# Run all available tests using automake's test-driver
|
||||
.PHONY: test-network-all
|
||||
# Hide directory path logs from submakes using $(MAKE) -s
|
||||
test-network-all:
|
||||
@$(MAKE) -s test-network-mkdir
|
||||
@$(MAKE) -s test-network-clean
|
||||
@$(MAKE) -s test-network-run \
|
||||
ipv4_flavors="$(TEST_CHUTNEY_FLAVORS)" \
|
||||
mixed_flavors="$(TEST_CHUTNEY_FLAVORS_MIXED)" \
|
||||
ipv6_flavors="$(TEST_CHUTNEY_FLAVORS_IPV6)" \
|
||||
ipv6_mixed_flavors="$(TEST_CHUTNEY_FLAVORS_IPV6_MIXED)"
|
||||
@$(MAKE) -s test-network-results
|
||||
|
||||
# Run IPv4 and mixed tests using automake's test-driver
|
||||
.PHONY: test-network-ipv4
|
||||
# Hide directory path logs from submakes using $(MAKE) -s
|
||||
test-network-ipv4:
|
||||
@$(MAKE) -s test-network-mkdir
|
||||
@$(MAKE) -s test-network-clean
|
||||
@$(MAKE) -s test-network-run \
|
||||
ipv4_flavors="$(TEST_CHUTNEY_FLAVORS)" \
|
||||
mixed_flavors="$(TEST_CHUTNEY_FLAVORS_MIXED)"
|
||||
@$(MAKE) -s test-network-results
|
||||
|
||||
# Run IPv6 tests using automake's test-driver
|
||||
.PHONY: test-network-ipv6
|
||||
# Hide directory path logs from submakes using $(MAKE) -s
|
||||
test-network-ipv6:
|
||||
@$(MAKE) -s test-network-mkdir
|
||||
@$(MAKE) -s test-network-clean
|
||||
@$(MAKE) -s test-network-run \
|
||||
ipv6_flavors="$(TEST_CHUTNEY_FLAVORS_IPV6)" \
|
||||
ipv6_mixed_flavors="$(TEST_CHUTNEY_FLAVORS_IPV6_MIXED)"
|
||||
@$(MAKE) -s test-network-results
|
||||
|
||||
# Make the test network log directory, if it does not exist
|
||||
.PHONY: test-network-mkdir
|
||||
test-network-mkdir:
|
||||
@mkdir -p "$(TEST_NETWORK_ALL_LOG_DIR)"
|
||||
|
||||
# Clean the test network log directory
|
||||
.PHONY: test-network-clean
|
||||
# We need to remove all matching files, so we can't quote the glob part of the
|
||||
# rm arguments
|
||||
test-network-clean:
|
||||
rm -f "$(TEST_NETWORK_ALL_LOG_DIR)"/*.log \
|
||||
"$(TEST_NETWORK_ALL_LOG_DIR)"/*.trs
|
||||
|
||||
# Run tests using automake's test-driver
|
||||
# When checking if a set of tests can be run, log the type of test, and the
|
||||
# list of tests that will be run (or skipped).
|
||||
#
|
||||
# Run the IPv4 tests in $(ipv4_flavors), unconditionally
|
||||
# - tor relays and directory authorities require IPv4.
|
||||
# Run the IPv6 tests in $(ipv6_flavors), if IPv6 is available
|
||||
# - only run IPv6 tests if we can ping6 or ping -6 ::1 (localhost)
|
||||
# we try the syntax for BSD ping6, Linux ping6, and Linux ping -6,
|
||||
# because they're incompatible
|
||||
# - some IPv6 tests may fail without an IPv6 DNS server
|
||||
# (see #16971 and #17011)
|
||||
# Run the mixed tests in $(mixed_flavors), if a tor-stable binary is available
|
||||
# - only run mixed tests if we have a tor-stable binary
|
||||
# Run the IPv6 mixed tests in $(ipv6_mixed_flavors), if IPv6 and mixed are run
|
||||
# - see above for details about IPv6 and mixed
|
||||
.PHONY: test-network-run
|
||||
# We need the word splitting in the "for" lines, so we can't quote
|
||||
# $(skip_flavors) or $(flavors)
|
||||
test-network-run: need-chutney-path test-driver $(TESTING_TOR_BINARY) src/tools/tor-gencert
|
||||
@flavors=""; \
|
||||
skip_flavors=""; \
|
||||
if test -n "$(ipv4_flavors)"; then \
|
||||
echo "Running IPv4 flavors: $(ipv4_flavors)."; \
|
||||
flavors="$$flavors $(ipv4_flavors)"; \
|
||||
fi; \
|
||||
test_network_ipv6=false; \
|
||||
if test -n "$(ipv6_flavors)" || \
|
||||
test -n "$(ipv6_mixed_flavors)"; then \
|
||||
if ping6 -q -c 1 -o ::1 >/dev/null 2>&1 || \
|
||||
ping6 -q -c 1 -W 1 ::1 >/dev/null 2>&1 || \
|
||||
ping -6 -c 1 -W 1 ::1 >/dev/null 2>&1; then \
|
||||
test_network_ipv6=true; \
|
||||
fi; \
|
||||
fi; \
|
||||
if test -n "$(ipv6_flavors)"; then \
|
||||
if test "$$test_network_ipv6" = "true"; then \
|
||||
echo "ping6 ::1 or ping ::1 succeeded, running IPv6" \
|
||||
"flavors: $(ipv6_flavors)."; \
|
||||
flavors="$$flavors $(ipv6_flavors)"; \
|
||||
else \
|
||||
echo "ping6 ::1 and ping ::1 failed, skipping IPv6" \
|
||||
"flavors: $(ipv6_flavors)."; \
|
||||
skip_flavors="$$skip_flavors $(ipv6_flavors)"; \
|
||||
fi; \
|
||||
fi; \
|
||||
test_network_mixed=false; \
|
||||
if test -n "$(mixed_flavors)" || \
|
||||
test -n "$(ipv6_mixed_flavors)"; then \
|
||||
if command -v tor-stable >/dev/null 2>&1; then \
|
||||
test_network_mixed=true; \
|
||||
fi; \
|
||||
fi; \
|
||||
if test -n "$(mixed_flavors)"; then \
|
||||
if test "$$test_network_mixed" = "true"; then \
|
||||
echo "tor-stable found, running mixed flavors:" \
|
||||
"$(mixed_flavors)."; \
|
||||
flavors="$$flavors $(mixed_flavors)"; \
|
||||
else \
|
||||
echo "tor-stable not found, skipping mixed flavors:" \
|
||||
"$(mixed_flavors)."; \
|
||||
skip_flavors="$$skip_flavors $(mixed_flavors)"; \
|
||||
fi; \
|
||||
fi; \
|
||||
if test -n "$(ipv6_mixed_flavors)"; then \
|
||||
if test "$$test_network_ipv6" = "true" && \
|
||||
test "$$test_network_mixed" = "true"; then \
|
||||
echo "Running IPv6 mixed flavors:" \
|
||||
"$(ipv6_mixed_flavors)."; \
|
||||
flavors="$$flavors $(ipv6_mixed_flavors)"; \
|
||||
else \
|
||||
echo "Skipping IPv6 mixed flavors:" \
|
||||
"$(ipv6_mixed_flavors)."; \
|
||||
skip_flavors="$$skip_flavors $(ipv6_mixed_flavors)"; \
|
||||
fi; \
|
||||
fi; \
|
||||
for f in $$skip_flavors; do \
|
||||
echo "SKIP: $$f"; \
|
||||
done; \
|
||||
for f in $$flavors; do \
|
||||
$(SHELL) "$(top_srcdir)/test-driver" --test-name "$$f" \
|
||||
--log-file "$(TEST_NETWORK_ALL_LOG_DIR)/$$f.log" \
|
||||
--trs-file "$(TEST_NETWORK_ALL_LOG_DIR)/$$f.trs" \
|
||||
$(TEST_NETWORK_ALL_DRIVER_FLAGS) \
|
||||
"$(top_srcdir)/src/test/test-network.sh" \
|
||||
--flavor "$$f" $(TEST_NETWORK_FLAGS); \
|
||||
"$(top_srcdir)/src/test/test-network.sh" \
|
||||
$(TEST_NETWORK_SHOW_WARNINGS_FOR_LAST_RUN_FLAGS); \
|
||||
done
|
||||
|
||||
# Print the results from automake's test-driver
|
||||
# - show tor warnings on the console after each network run
|
||||
# (otherwise, warnings go to the logs, and people don't see them unless
|
||||
# there is a network failure)
|
||||
.PHONY: test-network-results
|
||||
# We need to grep all matching files, so we can't quote the glob part of the
|
||||
# grep arguments
|
||||
test-network-results:
|
||||
@echo \
|
||||
"Log and result files are available in $(TEST_NETWORK_ALL_LOG_DIR)."
|
||||
@! grep -q FAIL "$(TEST_NETWORK_ALL_LOG_DIR)"/*.trs
|
||||
|
||||
need-stem-path:
|
||||
@if test ! -d "$$STEM_SOURCE_DIR"; then \
|
||||
echo '$$STEM_SOURCE_DIR was not set.'; echo; \
|
||||
echo "To run these tests, git clone https://gitlab.torproject.org/tpo/network-health/stem ; export STEM_SOURCE_DIR=\`pwd\`/stem"; \
|
||||
exit 1; \
|
||||
fi
|
||||
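# Example workflow (a sketch, not part of the original Makefile; it follows
# the instructions printed by the need-stem-path rule above):
#   git clone https://gitlab.torproject.org/tpo/network-health/stem
#   export STEM_SOURCE_DIR="`pwd`/stem"
#   make test-stem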
|
||||
test-stem: need-stem-path $(TESTING_TOR_BINARY)
|
||||
@$(PYTHON) "$$STEM_SOURCE_DIR"/run_tests.py --tor "$(TESTING_TOR_BINARY)" --integ --test control.controller --test control.base_controller --test process --log notice;
|
||||
|
||||
test-stem-full: need-stem-path $(TESTING_TOR_BINARY)
|
||||
@$(PYTHON) "$$STEM_SOURCE_DIR"/run_tests.py --tor "$(TESTING_TOR_BINARY)" --all --log notice --target RUN_ALL,ONLINE -v;
|
||||
|
||||
test-full: \
|
||||
need-stem-path \
|
||||
need-chutney-path \
|
||||
check \
|
||||
test-network \
|
||||
test-stem
|
||||
|
||||
test-full-online: \
|
||||
need-stem-path \
|
||||
need-chutney-path \
|
||||
check \
|
||||
test-network \
|
||||
test-stem-full
|
||||
|
||||
# We can't delete the gcno files, because they are created when tor is compiled
|
||||
reset-gcov:
|
||||
rm -f $(top_builddir)/src/*/*.gcda $(top_builddir)/src/*/*/*.gcda \
|
||||
$(top_builddir)/src/*/*.gcov $(top_builddir)/src/*/*/*.gcov
|
||||
|
||||
HTML_COVER_DIR=$(top_builddir)/coverage_html
|
||||
coverage-html: all
|
||||
if COVERAGE_ENABLED
|
||||
test -e "`which lcov`" || (echo "lcov must be installed. See <http://ltp.sourceforge.net/coverage/lcov.php>." && false)
|
||||
test -d "$(HTML_COVER_DIR)" || $(MKDIR_P) "$(HTML_COVER_DIR)"
|
||||
lcov --rc lcov_branch_coverage=1 --directory $(top_builddir)/src --zerocounters
|
||||
$(MAKE) reset-gcov
|
||||
$(MAKE) check
|
||||
lcov --capture --rc lcov_branch_coverage=1 --no-external --directory $(top_builddir) --base-directory $(top_srcdir) --output-file "$(HTML_COVER_DIR)/lcov.tmp"
|
||||
lcov --remove "$(HTML_COVER_DIR)/lcov.tmp" --rc lcov_branch_coverage=1 'test/*' 'ext/tinytest*' '/usr/*' --output-file "$(HTML_COVER_DIR)/lcov.info"
|
||||
genhtml --branch-coverage -o "$(HTML_COVER_DIR)" "$(HTML_COVER_DIR)/lcov.info"
|
||||
else
|
||||
@printf "Not configured with --enable-coverage, run ./configure --enable-coverage\n"
|
||||
endif
|
||||
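# Example workflow (a sketch, not part of the original Makefile): build with
# coverage enabled, then generate the HTML report under coverage_html/:
#   ./configure --enable-coverage
#   make coverage-html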
|
||||
coverage-html-full: all
|
||||
test -e "`which lcov`" || (echo "lcov must be installed. See <http://ltp.sourceforge.net/coverage/lcov.php>." && false)
|
||||
test -d "$(HTML_COVER_DIR)" || mkdir -p "$(HTML_COVER_DIR)"
|
||||
lcov --rc lcov_branch_coverage=1 --directory ./src --zerocounters
|
||||
$(MAKE) reset-gcov
|
||||
$(MAKE) check
|
||||
$(MAKE) test-stem-full
|
||||
CHUTNEY_TOR=tor-cov CHUTNEY_TOR_GENCERT=tor-cov-gencert $(top_srcdir)/src/test/test-network.sh
|
||||
CHUTNEY_TOR=tor-cov CHUTNEY_TOR_GENCERT=tor-cov-gencert $(top_srcdir)/src/test/test-network.sh --flavor hs
|
||||
lcov --capture --rc lcov_branch_coverage=1 --no-external --directory . --output-file "$(HTML_COVER_DIR)/lcov.tmp"
|
||||
lcov --remove "$(HTML_COVER_DIR)/lcov.tmp" --rc lcov_branch_coverage=1 'test/*' 'ext/tinytest*' '/usr/*' --output-file "$(HTML_COVER_DIR)/lcov.info"
|
||||
genhtml --branch-coverage -o "$(HTML_COVER_DIR)" "$(HTML_COVER_DIR)/lcov.info"
|
||||
|
||||
# For scripts: avoid src/ext and src/trunnel.
|
||||
# Keep these lists consistent:
|
||||
# - OWNED_TOR_C_FILES in Makefile.am
|
||||
# - CHECK_FILES in pre-commit.git-hook and pre-push.git-hook
|
||||
# - try_parse in check_cocci_parse.sh
|
||||
OWNED_TOR_C_FILES=\
|
||||
$(top_srcdir)/src/lib/*/*.[ch] \
|
||||
$(top_srcdir)/src/core/*/*.[ch] \
|
||||
$(top_srcdir)/src/feature/*/*.[ch] \
|
||||
$(top_srcdir)/src/app/*/*.[ch] \
|
||||
$(top_srcdir)/src/test/*.[ch] \
|
||||
$(top_srcdir)/src/test/*/*.[ch] \
|
||||
$(top_srcdir)/src/tools/*.[ch]
|
||||
|
||||
check-spaces:
|
||||
if USE_PERL
|
||||
$(PERL) $(top_srcdir)/scripts/maint/checkSpace.pl -C \
|
||||
$(OWNED_TOR_C_FILES)
|
||||
endif
|
||||
|
||||
check-includes:
|
||||
if USEPYTHON
|
||||
$(PYTHON) $(top_srcdir)/scripts/maint/practracker/includes.py $(top_srcdir)
|
||||
endif
|
||||
|
||||
check-best-practices:
|
||||
if USEPYTHON
|
||||
@$(PYTHON) $(top_srcdir)/scripts/maint/practracker/practracker.py $(top_srcdir) $(TOR_PRACTRACKER_OPTIONS)
|
||||
endif
|
||||
|
||||
check-cocci:
|
||||
VERBOSE=1 $(top_srcdir)/scripts/coccinelle/check_cocci_parse.sh $(OWNED_TOR_C_FILES)
|
||||
|
||||
practracker-regen:
|
||||
$(PYTHON) $(top_srcdir)/scripts/maint/practracker/practracker.py --regen $(top_srcdir)
|
||||
|
||||
check-docs: all
|
||||
$(PERL) $(top_builddir)/scripts/maint/checkOptionDocs.pl
|
||||
|
||||
check-logs:
|
||||
$(top_srcdir)/scripts/maint/checkLogs.pl \
|
||||
$(top_srcdir)/src/*/*.[ch] | sort -n
|
||||
|
||||
.PHONY: check-typos
|
||||
check-typos:
|
||||
@if test -x "`which misspell 2>&1;true`"; then \
|
||||
echo "Checking for Typos ..."; \
|
||||
(misspell \
|
||||
$(top_srcdir)/src/[^e]*/*.[ch] \
|
||||
$(top_srcdir)/doc \
|
||||
$(top_srcdir)/contrib \
|
||||
$(top_srcdir)/scripts \
|
||||
$(top_srcdir)/README.md \
|
||||
$(top_srcdir)/ChangeLog \
|
||||
$(top_srcdir)/INSTALL \
|
||||
$(top_srcdir)/ReleaseNotes \
|
||||
$(top_srcdir)/LICENSE); \
|
||||
else \
|
||||
echo "Tor can use misspell to check for typos."; \
|
||||
echo "It seems that you don't have misspell installed."; \
|
||||
echo "You can install the latest version of misspell here: https://github.com/client9/misspell#install"; \
|
||||
fi
|
||||
|
||||
.PHONY: check-changes
|
||||
check-changes:
|
||||
if USEPYTHON
|
||||
@if test -d "$(top_srcdir)/changes"; then \
|
||||
PACKAGE_VERSION=$(PACKAGE_VERSION) $(PYTHON) $(top_srcdir)/scripts/maint/lintChanges.py $(top_srcdir)/changes; \
|
||||
fi
|
||||
endif
|
||||
|
||||
.PHONY: update-versions
|
||||
update-versions:
|
||||
abs_top_srcdir="$(abs_top_srcdir)" $(PYTHON) $(top_srcdir)/scripts/maint/update_versions.py
|
||||
|
||||
.PHONY: callgraph
|
||||
callgraph:
|
||||
cd $(top_builddir); $(abs_top_srcdir)/scripts/maint/run_calltool.sh
|
||||
|
||||
version:
|
||||
@echo "Tor @VERSION@"
|
||||
@if test -d "$(top_srcdir)/.git" && test -x "`which git 2>&1;true`"; then \
|
||||
echo -n "git: " ;\
|
||||
(cd "$(top_srcdir)" && git rev-parse --short=16 HEAD); \
|
||||
fi
|
||||
|
||||
.PHONY: autostyle-ifdefs
|
||||
autostyle-ifdefs:
|
||||
$(PYTHON) $(top_srcdir)/scripts/maint/annotate_ifdef_directives.py $(OWNED_TOR_C_FILES)
|
||||
|
||||
.PHONY: autostyle-operators
|
||||
autostyle-operators:
|
||||
$(PERL) $(top_srcdir)/scripts/coccinelle/test-operator-cleanup $(OWNED_TOR_C_FILES)
|
||||
|
||||
.PHONY: rectify-includes
|
||||
rectify-includes:
|
||||
cd $(top_srcdir); $(PYTHON) $(abs_top_srcdir)/scripts/maint/rectify_include_paths.py
|
||||
|
||||
.PHONY: update-copyright
|
||||
update-copyright:
|
||||
$(PERL) $(top_srcdir)/scripts/maint/updateCopyright.pl $(OWNED_TOR_C_FILES)
|
||||
|
||||
.PHONY: autostyle
|
||||
autostyle: update-versions autostyle-ifdefs rectify-includes
|
||||
|
||||
mostlyclean-local:
|
||||
rm -f $(top_builddir)/src/*/*.gc{da,no} $(top_builddir)/src/*/*/*.gc{da,no}
|
||||
rm -rf $(HTML_COVER_DIR)
|
||||
rm -rf $(top_builddir)/doc/doxygen
|
||||
rm -rf $(TEST_NETWORK_ALL_LOG_DIR)
|
||||
|
||||
# This relies on some internal details of how automake implements
|
||||
# distcheck. We check two directories because automake-1.15 changed
|
||||
# from $(distdir)/_build to $(distdir)/_build/sub.
|
||||
show-distdir-testlog:
|
||||
@if test -d "$(distdir)/_build/sub"; then \
|
||||
cat $(distdir)/_build/sub/$(TEST_SUITE_LOG); \
|
||||
else \
|
||||
cat $(distdir)/_build/$(TEST_SUITE_LOG); fi
|
||||
|
||||
# Similarly, this relies on automake internals to run file on an
|
||||
# intermittent core file whose provenance is not known to us. See
|
||||
# ticket 26787.
|
||||
show-distdir-core:
|
||||
@if test -d "$(distdir)/_build/sub"; then \
|
||||
file $(distdir)/_build/sub/core ; \
|
||||
else \
|
||||
file $(distdir)/_build/core; fi
|
||||
|
||||
show-libs:
|
||||
@echo libtor.a
|
||||
|
||||
show-testing-libs:
|
||||
@echo src/test/libtor-testing.a
|
||||
|
||||
# Note that we hardcode -j2 here because if the user passes too many
# cores, bear actually chokes and dies. For this to work, a "make clean"
# needs to be done first, or bear will miss some compile flags.
|
||||
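# Example (a sketch, not part of the original Makefile): with bear installed
# (e.g. "apt install bear" on Debian), generate the LSP data with:
#   make lsp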
lsp:
|
||||
@if test -x "`which bear 2>&1;true`"; then \
|
||||
echo "Generating LSP compile_commands.json. Might take few minutes..."; \
|
||||
$(MAKE) clean 2>&1 >/dev/null; \
|
||||
bear >/dev/null 2>&1 -- $(MAKE) -j2 2>&1 >/dev/null; \
|
||||
echo "Generating .ccls file..."; \
|
||||
./scripts/maint/gen_ccls_file.sh; \
|
||||
else \
|
||||
echo "No bear command found. On debian, apt install bear"; \
|
||||
fi
|
||||
|
||||
# Reproducible tarball. We change the tar options for this.
|
||||
dist-reprod:
|
||||
$(MAKE) dist am__tar="$${TAR-tar} --format=gnu --owner=root --group=root --sort=name --mtime=\"`git show --no-patch --format='%ci'`\" -chof - $(distdir)"
|
README.md
@@ -1,55 +1,12 @@
This project is no longer hosted here. Come find us at [our gitlab
repository](https://gitlab.torproject.org/tpo/core/tor) instead.

## About

Tor protects your privacy on the internet by hiding the connection between
your Internet address and the services you use. We believe Tor is reasonably
secure, but please ensure you read the instructions and configure it properly.

## Build

To build Tor from source:

```
./configure
make
make install
```

To build Tor from a just-cloned git repository:

```
./autogen.sh
./configure
make
make install
```

## Releases

The tarballs, checksums and signatures can be found here: https://dist.torproject.org

- Checksum: `<tarball-name>.sha256sum`
- Signatures: `<tarball-name>.sha256sum.asc`
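
For example, a minimal verification sketch (the release number below is
hypothetical, and it assumes you have already imported one of the maintainer
keys listed in the next section):

```
# Download the tarball plus its checksum file and the checksum signature.
wget https://dist.torproject.org/tor-0.4.8.10.tar.gz
wget https://dist.torproject.org/tor-0.4.8.10.tar.gz.sha256sum
wget https://dist.torproject.org/tor-0.4.8.10.tar.gz.sha256sum.asc

# Check the signature on the checksum file, then the tarball's checksum.
gpg --verify tor-0.4.8.10.tar.gz.sha256sum.asc tor-0.4.8.10.tar.gz.sha256sum
sha256sum -c tor-0.4.8.10.tar.gz.sha256sum
```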
### Schedule

You can find our release schedule here:

- https://gitlab.torproject.org/tpo/core/team/-/wikis/NetworkTeam/CoreTorReleases

### Keys that CAN sign a release

The following keys belong to the maintainers of this repository. A release may
be signed by any one or more of these keys; do NOT expect all of them:

- Alexander Færøy:
  [514102454D0A87DB0767A1EBBE6A0531C18A9179](https://keys.openpgp.org/vks/v1/by-fingerprint/1C1BC007A9F607AA8152C040BEA7B180B1491921)
- David Goulet:
  [B74417EDDF22AC9F9E90F49142E86A2A11F48D36](https://keys.openpgp.org/vks/v1/by-fingerprint/B74417EDDF22AC9F9E90F49142E86A2A11F48D36)
- Nick Mathewson:
  [2133BC600AB133E1D826D173FE43009C4607B1FB](https://keys.openpgp.org/vks/v1/by-fingerprint/2133BC600AB133E1D826D173FE43009C4607B1FB)

## Development

See our hacking documentation in [doc/HACKING/](./doc/HACKING).

## Resources

Home page:
ReleaseNotes (29309 lines)
File diff suppressed because it is too large
acinclude.m4
@@ -1,288 +0,0 @@
|
||||
dnl Helper macros for Tor configure.ac
|
||||
dnl Copyright (c) 2001-2004, Roger Dingledine
|
||||
dnl Copyright (c) 2004-2006, Roger Dingledine, Nick Mathewson
|
||||
dnl Copyright (c) 2007-2008, Roger Dingledine, Nick Mathewson
|
||||
dnl Copyright (c) 2007-2019, The Tor Project, Inc.
|
||||
dnl See LICENSE for licensing information
|
||||
|
||||
AC_DEFUN([TOR_EXTEND_CODEPATH],
|
||||
[
|
||||
if test -d "$1/lib"; then
|
||||
LDFLAGS="-L$1/lib $LDFLAGS"
|
||||
else
|
||||
LDFLAGS="-L$1 $LDFLAGS"
|
||||
fi
|
||||
if test -d "$1/include"; then
|
||||
CPPFLAGS="-I$1/include $CPPFLAGS"
|
||||
else
|
||||
CPPFLAGS="-I$1 $CPPFLAGS"
|
||||
fi
|
||||
])
|
||||
|
||||
AC_DEFUN([TOR_DEFINE_CODEPATH],
|
||||
[
|
||||
if test x$1 = "x(system)"; then
|
||||
TOR_LDFLAGS_$2=""
|
||||
TOR_CPPFLAGS_$2=""
|
||||
else
|
||||
if test -d "$1/lib"; then
|
||||
TOR_LDFLAGS_$2="-L$1/lib"
|
||||
TOR_LIBDIR_$2="$1/lib"
|
||||
else
|
||||
TOR_LDFLAGS_$2="-L$1"
|
||||
TOR_LIBDIR_$2="$1"
|
||||
fi
|
||||
if test -d "$1/include"; then
|
||||
TOR_CPPFLAGS_$2="-I$1/include"
|
||||
else
|
||||
TOR_CPPFLAGS_$2="-I$1"
|
||||
fi
|
||||
fi
|
||||
AC_SUBST(TOR_CPPFLAGS_$2)
|
||||
AC_SUBST(TOR_LDFLAGS_$2)
|
||||
])
|
||||
|
||||
dnl 1: flags
|
||||
dnl 2: try to link too if this is nonempty.
|
||||
dnl 3: what to do on success compiling
|
||||
dnl 4: what to do on failure compiling
|
||||
AC_DEFUN([TOR_TRY_COMPILE_WITH_CFLAGS], [
|
||||
AS_VAR_PUSHDEF([VAR],[tor_cv_cflags_$1])
|
||||
AC_CACHE_CHECK([whether the compiler accepts $1], VAR, [
|
||||
tor_saved_CFLAGS="$CFLAGS"
|
||||
CFLAGS="$CFLAGS -pedantic -Werror $1"
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[]])],
|
||||
[AS_VAR_SET(VAR,yes)],
|
||||
[AS_VAR_SET(VAR,no)])
|
||||
if test x$2 != x; then
|
||||
AS_VAR_PUSHDEF([can_link],[tor_can_link_$1])
|
||||
AC_LINK_IFELSE([AC_LANG_PROGRAM([[]], [[]])],
|
||||
[AS_VAR_SET(can_link,yes)],
|
||||
[AS_VAR_SET(can_link,no)])
|
||||
AS_VAR_POPDEF([can_link])
|
||||
fi
|
||||
CFLAGS="$tor_saved_CFLAGS"
|
||||
])
|
||||
if test x$VAR = xyes; then
|
||||
$3
|
||||
else
|
||||
$4
|
||||
fi
|
||||
AS_VAR_POPDEF([VAR])
|
||||
])
|
||||
|
||||
dnl 1:flags
|
||||
dnl 2:also try to link (yes: non-empty string)
|
||||
dnl will set yes or no in $tor_can_link_$1 (as modified by AS_VAR_PUSHDEF)
|
||||
AC_DEFUN([TOR_CHECK_CFLAGS], [
|
||||
TOR_TRY_COMPILE_WITH_CFLAGS($1, $2, CFLAGS="$CFLAGS $1", true)
|
||||
])
|
||||
|
||||
dnl 1:flags
|
||||
dnl 2:extra ldflags
|
||||
dnl 3:extra libraries
|
||||
AC_DEFUN([TOR_CHECK_LDFLAGS], [
|
||||
AS_VAR_PUSHDEF([VAR],[tor_cv_ldflags_$1])
|
||||
AC_CACHE_CHECK([whether the linker accepts $1], VAR, [
|
||||
tor_saved_CFLAGS="$CFLAGS"
|
||||
tor_saved_LDFLAGS="$LDFLAGS"
|
||||
tor_saved_LIBS="$LIBS"
|
||||
CFLAGS="$CFLAGS -pedantic -Werror"
|
||||
LDFLAGS="$LDFLAGS $2 $1"
|
||||
LIBS="$LIBS $3"
|
||||
AC_RUN_IFELSE([AC_LANG_PROGRAM([#include <stdio.h>], [fputs("", stdout)])],
|
||||
[AS_VAR_SET(VAR,yes)],
|
||||
[AS_VAR_SET(VAR,no)],
|
||||
[AC_LINK_IFELSE([AC_LANG_PROGRAM([[]], [[]])],
|
||||
[AS_VAR_SET(VAR,yes)],
|
||||
[AS_VAR_SET(VAR,no)])])
|
||||
CFLAGS="$tor_saved_CFLAGS"
|
||||
LDFLAGS="$tor_saved_LDFLAGS"
|
||||
LIBS="$tor_saved_LIBS"
|
||||
])
|
||||
if test x$VAR = xyes; then
|
||||
LDFLAGS="$LDFLAGS $1"
|
||||
fi
|
||||
AS_VAR_POPDEF([VAR])
|
||||
])
|
||||
|
||||
dnl 1:libname
|
||||
AC_DEFUN([TOR_WARN_MISSING_LIB], [
|
||||
h=""
|
||||
if test x$2 = xdevpkg; then
|
||||
h=" headers for"
|
||||
fi
|
||||
if test -f /etc/debian_version && test x"$tor_$1_$2_debian" != x; then
|
||||
AC_MSG_WARN([On Debian, you can install$h $1 using "apt-get install $tor_$1_$2_debian"])
|
||||
if test x"$tor_$1_$2_debian" != x"$tor_$1_devpkg_debian"; then
|
||||
AC_MSG_WARN([ You will probably need $tor_$1_devpkg_debian too.])
|
||||
fi
|
||||
fi
|
||||
if test -f /etc/fedora-release && test x"$tor_$1_$2_redhat" != x; then
|
||||
AC_MSG_WARN([On Fedora, you can install$h $1 using "dnf install $tor_$1_$2_redhat"])
|
||||
if test x"$tor_$1_$2_redhat" != x"$tor_$1_devpkg_redhat"; then
|
||||
AC_MSG_WARN([ You will probably need to install $tor_$1_devpkg_redhat too.])
|
||||
fi
|
||||
else
|
||||
if test -f /etc/redhat-release && test x"$tor_$1_$2_redhat" != x; then
|
||||
AC_MSG_WARN([On most Redhat-based systems, you can get$h $1 by installing the $tor_$1_$2_redhat RPM package])
|
||||
if test x"$tor_$1_$2_redhat" != x"$tor_$1_devpkg_redhat"; then
|
||||
AC_MSG_WARN([ You will probably need to install $tor_$1_devpkg_redhat too.])
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
])
|
||||
|
||||
dnl Look for a library, and its associated includes, and how to link
|
||||
dnl against it.
|
||||
dnl
|
||||
dnl TOR_SEARCH_LIBRARY(1:libname, 2:IGNORED, 3:linkargs, 4:headers,
|
||||
dnl 5:prototype,
|
||||
dnl 6:code, 7:IGNORED, 8:searchextra)
|
||||
dnl
|
||||
dnl Special variables:
|
||||
dnl ALT_{libname}_WITHVAL -- another possible value for --with-$1-dir.
|
||||
dnl Used to support renaming --with-ssl-dir to --with-openssl-dir
|
||||
dnl
|
||||
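dnl Illustrative usage sketch (not part of the original file; the argument
dnl values are examples only, following the parameter list documented above):
dnl   TOR_SEARCH_LIBRARY(zlib, , [-lz], [#include <zlib.h>],
dnl     [const char * zlibVersion(void);], [zlibVersion();], , )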
AC_DEFUN([TOR_SEARCH_LIBRARY], [
|
||||
try$1dir=""
|
||||
AC_ARG_WITH($1-dir,
|
||||
AS_HELP_STRING(--with-$1-dir=PATH, [specify path to $1 installation]),
|
||||
[
|
||||
if test x$withval != xno ; then
|
||||
try$1dir="$withval"
|
||||
fi
|
||||
])
|
||||
if test "x$try$1dir" = x && test "x$ALT_$1_WITHVAL" != x ; then
|
||||
try$1dir="$ALT_$1_WITHVAL"
|
||||
fi
|
||||
|
||||
tor_saved_LIBS="$LIBS"
|
||||
tor_saved_LDFLAGS="$LDFLAGS"
|
||||
tor_saved_CPPFLAGS="$CPPFLAGS"
|
||||
AC_CACHE_CHECK([for $1 directory], tor_cv_library_$1_dir, [
|
||||
tor_$1_dir_found=no
|
||||
tor_$1_any_linkable=no
|
||||
|
||||
for tor_trydir in "$try$1dir" "(system)" "$prefix" /usr/local /usr/pkg $8; do
|
||||
LDFLAGS="$tor_saved_LDFLAGS"
|
||||
LIBS="$3 $tor_saved_LIBS"
|
||||
CPPFLAGS="$tor_saved_CPPFLAGS"
|
||||
|
||||
if test -z "$tor_trydir" ; then
|
||||
continue;
|
||||
fi
|
||||
|
||||
# Skip the directory if it isn't there.
|
||||
if test ! -d "$tor_trydir" && test "$tor_trydir" != "(system)"; then
|
||||
continue;
|
||||
fi
|
||||
|
||||
# If this isn't blank, try adding the directory (or appropriate
|
||||
# include/libs subdirectories) to the command line.
|
||||
if test "$tor_trydir" != "(system)"; then
|
||||
TOR_EXTEND_CODEPATH($tor_trydir)
|
||||
fi
|
||||
|
||||
# Can we link against (but not necessarily run, or find the headers for)
|
||||
# the binary?
|
||||
AC_LINK_IFELSE([AC_LANG_PROGRAM([$5], [$6])],
|
||||
[linkable=yes], [linkable=no])
|
||||
|
||||
if test "$linkable" = yes; then
|
||||
tor_$1_any_linkable=yes
|
||||
# Okay, we can link against it. Can we find the headers?
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([$4], [$6])],
|
||||
[buildable=yes], [buildable=no])
|
||||
if test "$buildable" = yes; then
|
||||
tor_cv_library_$1_dir=$tor_trydir
|
||||
tor_$1_dir_found=yes
|
||||
break
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
if test "$tor_$1_dir_found" = no; then
|
||||
if test "$tor_$1_any_linkable" = no ; then
|
||||
AC_MSG_WARN([Could not find a linkable $1. If you have it installed somewhere unusual, you can specify an explicit path using --with-$1-dir])
|
||||
TOR_WARN_MISSING_LIB($1, pkg)
|
||||
AC_MSG_ERROR([Missing libraries; unable to proceed.])
|
||||
else
|
||||
AC_MSG_WARN([We found the libraries for $1, but we could not find the C header files. You may need to install a devel package.])
|
||||
TOR_WARN_MISSING_LIB($1, devpkg)
|
||||
AC_MSG_ERROR([Missing headers; unable to proceed.])
|
||||
fi
|
||||
fi
|
||||
|
||||
LDFLAGS="$tor_saved_LDFLAGS"
|
||||
LIBS="$tor_saved_LIBS"
|
||||
CPPFLAGS="$tor_saved_CPPFLAGS"
|
||||
]) dnl end cache check
|
||||
|
||||
LIBS="$3 $LIBS"
|
||||
if test "$tor_cv_library_$1_dir" != "(system)"; then
|
||||
TOR_EXTEND_CODEPATH($tor_cv_library_$1_dir)
|
||||
fi
|
||||
|
||||
TOR_DEFINE_CODEPATH($tor_cv_library_$1_dir, $1)
|
||||
|
||||
if test "$cross_compiling" != yes; then
|
||||
AC_CACHE_CHECK([whether we need extra options to link $1],
|
||||
tor_cv_library_$1_linker_option, [
|
||||
orig_LDFLAGS="$LDFLAGS"
|
||||
runs=no
|
||||
linked_with=nothing
|
||||
if test -d "$tor_cv_library_$1_dir/lib"; then
|
||||
tor_trydir="$tor_cv_library_$1_dir/lib"
|
||||
else
|
||||
tor_trydir="$tor_cv_library_$1_dir"
|
||||
fi
|
||||
for tor_tryextra in "(none)" "-Wl,-R$tor_trydir" "-R$tor_trydir" \
|
||||
"-Wl,-rpath,$tor_trydir" ; do
|
||||
if test "$tor_tryextra" = "(none)"; then
|
||||
LDFLAGS="$orig_LDFLAGS"
|
||||
else
|
||||
LDFLAGS="$tor_tryextra $orig_LDFLAGS"
|
||||
fi
|
||||
AC_RUN_IFELSE([AC_LANG_PROGRAM([$5], [$6])],
|
||||
[runnable=yes], [runnable=no],
|
||||
[AC_LINK_IFELSE([AC_LANG_PROGRAM([[]], [[]])],
|
||||
[runnable=yes],
|
||||
[runnable=no])])
|
||||
if test "$runnable" = yes; then
|
||||
tor_cv_library_$1_linker_option=$tor_tryextra
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if test "$runnable" = no; then
|
||||
AC_MSG_ERROR([Found linkable $1 in $tor_cv_library_$1_dir, but it does not seem to run, even with -R. Maybe specify another using --with-$1-dir])
|
||||
fi
|
||||
LDFLAGS="$orig_LDFLAGS"
|
||||
]) dnl end cache check check for extra options.
|
||||
|
||||
if test "$tor_cv_library_$1_linker_option" != "(none)" ; then
|
||||
TOR_LDFLAGS_$1="$TOR_LDFLAGS_$1 $tor_cv_library_$1_linker_option"
|
||||
fi
|
||||
fi # cross-compile
|
||||
|
||||
LIBS="$tor_saved_LIBS"
|
||||
LDFLAGS="$tor_saved_LDFLAGS"
|
||||
CPPFLAGS="$tor_saved_CPPFLAGS"
|
||||
|
||||
]) dnl end defun
|
||||
|
||||
dnl Check whether the prototype for a function is present or missing.
|
||||
dnl Apple has a nasty habit of putting functions in their libraries (so that
|
||||
dnl AC_CHECK_FUNCS passes) but not actually declaring them in the headers.
|
||||
dnl
|
||||
dnl TOR_CHECK_PROTOTYPE(1:functionname, 2:macroname, 3:includes)
|
||||
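dnl Illustrative usage sketch (not part of the original file; the function,
dnl macro, and header names are examples only):
dnl   TOR_CHECK_PROTOTYPE(malloc_good_size, HAVE_MALLOC_GOOD_SIZE_PROTOTYPE,
dnl                       [#include <malloc/malloc.h>])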
AC_DEFUN([TOR_CHECK_PROTOTYPE], [
|
||||
AC_CACHE_CHECK([for declaration of $1], tor_cv_$1_declared, [
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([$3],[void *ptr= $1 ;])],
|
||||
tor_cv_$1_declared=yes,tor_cv_$1_declared=no)])
|
||||
if test x$tor_cv_$1_declared != xno ; then
|
||||
AC_DEFINE($2, 1,
|
||||
[Defined if the prototype for $1 seems to be present.])
|
||||
fi
|
||||
])
|
autogen.sh
@@ -1,24 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
if command -v autoreconf; then
|
||||
opt="-i -f -W all,error"
|
||||
|
||||
for i in "$@"; do
|
||||
case "$i" in
|
||||
-v)
|
||||
opt="${opt} -v"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
exec autoreconf $opt
|
||||
fi
|
||||
|
||||
set -e
|
||||
|
||||
# Run this to generate all the initial makefiles, etc.
|
||||
aclocal -I m4 && \
|
||||
autoheader && \
|
||||
autoconf && \
|
||||
automake --add-missing --copy
|
@@ -1,37 +0,0 @@
|
||||
This file is here to keep git from removing the changes directory when
|
||||
all the changes files have been merged.
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
"I'm Nobody! Who are you?
|
||||
Are you--Nobody--too?
|
||||
Then there's a pair of us!
|
||||
Don’t tell! they'd advertise--you know!
|
||||
|
||||
How dreary--to be--Somebody!
|
||||
How public--like a Frog--
|
||||
To tell one's name--the livelong June--
|
||||
To an admiring Bog!"
|
||||
-- Emily Dickinson
|
||||
|
@@ -1,10 +0,0 @@
|
||||
o Minor bugfixes (conflux):
|
||||
- Fix stream attachment order when creating conflux circuits, so that
|
||||
stream attachment happens after finishing the full link handshake,
|
||||
rather than upon set finalization. Fixes bug 40801; bugfix on
|
||||
0.4.8.1-alpha.
|
||||
- Remove a "BUG" warning from conflux_pick_first_leg that can be
|
||||
triggered by broken or malicious clients. Fixes bug 40801; bugfix
|
||||
on 0.4.8.1-alpha.
|
||||
- Fix a case where we were resuming reading on edge connections that
|
||||
were already marked for close. Fixes bug 40801; bugfix on 0.4.8.1-alpha.
|
@@ -1,4 +0,0 @@
|
||||
o Minor bugfixes (conflux):
|
||||
- Handle legs being closed or destroyed before computing an RTT
|
||||
(resulting in warns about too many legs). Fixes bug 40810; bugfix on
|
||||
0.4.8.1-alpha.
|
@@ -1,6 +0,0 @@
|
||||
o Minor bugfixes (conflux):
|
||||
- Count leg launch attempts prior to attempting to launch them. This
|
||||
avoids infinite launch attempts due to internal circuit building
|
||||
failures. Additionally, double-check that we have enough exits in
|
||||
our consensus overall, before attempting to launch conflux sets.
|
||||
Fixes bug 40811; bugfix on 0.4.8.1-alpha.
|
@@ -1,3 +0,0 @@
|
||||
o Minor feature (hs):
|
||||
- Fix compiler warnings in equix and hashx when building with clang.
|
||||
Closes ticket 40800.
|
configure.ac (2838 lines)
File diff suppressed because it is too large
@@ -1,64 +0,0 @@
|
||||
The contrib/ directory contains small tools that might be useful for using
|
||||
with Tor. A few of them are included in the Tor source distribution; you can
|
||||
find the others in the main Tor repository. We don't guarantee that they're
|
||||
particularly useful.
|
||||
|
||||
dirauth-tools/ -- Tools useful for directory authority administrators
|
||||
---------------------------------------------------------------------
|
||||
|
||||
add-tor is an old script to manipulate the approved-routers file.
|
||||
|
||||
nagios-check-tor-authority-cert is a nagios script to check when Tor
|
||||
authority certificates are expired or nearly expired.
|
||||
|
||||
clang/ -- Files for use with the clang compiler
|
||||
-----------------------------------------------
|
||||
|
||||
sanitize_blacklist.txt is used to build Tor with clang's dynamic
|
||||
AddressSanitizer and UndefinedBehaviorSanitizer. It contains detailed
|
||||
instructions on configuration, build, and testing with clang's sanitizers.
|
||||
|
||||
client-tools/ -- Tools for use with Tor clients
|
||||
-----------------------------------------------
|
||||
|
||||
torify is a small wrapper script around torsocks.
|
||||
|
||||
tor-resolve.py uses Tor's SOCKS port extensions to perform DNS lookups. You
|
||||
should probably use src/tools/tor-resolve instead.
|
||||
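For example (an illustrative sketch, not part of the original text; it assumes
you run it from the contrib/ directory and that a tor client is listening on
the default SOCKS port, 9050):

  ./client-tools/tor-resolve.py -5 torproject.org localhost:9050

The -5 flag selects SOCKS5; if the sockshost:socksport argument is omitted,
the script defaults to localhost:9050.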
|
||||
dist/ -- Scripts and files for use when packaging Tor
|
||||
-----------------------------------------------------
|
||||
|
||||
torctl and tor.sh are init scripts for use with SysV-style init
|
||||
tools. Everybody likes to write init scripts differently, it seems.
|
||||
|
||||
operator-tools/ -- Tools for Tor relay operators
|
||||
------------------------------------------------
|
||||
|
||||
tor-exit-notice.html is an HTML file for use with the DirPortFrontPage
|
||||
option. It tells visitors that your relay is a Tor exit node, and that they
|
||||
shouldn't assume you're the origin for the traffic that you're delivering.
|
||||
|
||||
tor.logrotate is a configuration file for use with the logrotate tool. You
|
||||
may need to edit it to work for you.
|
||||
|
||||
linux-tor-prio.sh uses Linux iptables tools to traffic-shape your Tor relay's
|
||||
traffic. If it breaks, you get to keep both pieces.
|
||||
|
||||
or-tools/ -- Tools for interacting with relays
|
||||
----------------------------------------------
|
||||
|
||||
checksocks.pl is a tool to scan relays to see if any of them have advertised
|
||||
public SOCKS ports, so we can tell them not to.
|
||||
|
||||
check-tor is a quick shell script to try doing a TLS handshake with a router
|
||||
or to try fetching a directory from it.
|
||||
|
||||
exitlist is a precursor of check.torproject.org: it parses a bunch of cached
|
||||
server descriptors to determine which can connect to a given address:port.
|
||||
|
||||
win32build -- Old files for windows packaging
|
||||
---------------------------------------------
|
||||
|
||||
You shouldn't need these unless you're building some of the older Windows
|
||||
packages.
|
@@ -1,103 +0,0 @@
|
||||
# clang sanitizer special case list
|
||||
# syntax specified in https://clang.llvm.org/docs/SanitizerSpecialCaseList.html
|
||||
# for more info see https://clang.llvm.org/docs/AddressSanitizer.html
|
||||
|
||||
#
|
||||
# Tor notes: This file is obsolete!
|
||||
#
|
||||
# It was necessary in order to apply the sanitizers to all of tor. But
|
||||
# we don't believe that's a good idea: some parts of tor need constant-time
|
||||
# behavior that is hard to guarantee with these sanitizers.
|
||||
#
|
||||
# If you need this behavior, then please consider --enable-expensive-hardening,
|
||||
# and report bugs as needed.
|
||||
#
|
||||
|
||||
# usage:
|
||||
# 1. configure tor build:
|
||||
# ./configure \
|
||||
# CC=clang \
|
||||
# CFLAGS="-fsanitize-blacklist=contrib/clang/sanitize_blacklist.txt -fsanitize=undefined -fsanitize=address -fno-sanitize-recover=all -fno-omit-frame-pointer -fno-optimize-sibling-calls -fno-inline" \
|
||||
# LDFLAGS="-fsanitize=address" \
|
||||
# --disable-gcc-hardening
|
||||
# and any other flags required to build tor on your OS.
|
||||
#
|
||||
# 2. build tor:
|
||||
# make
|
||||
#
|
||||
# 3. test tor:
|
||||
# ASAN_OPTIONS=allow_user_segv_handler=1 make test
|
||||
# ASAN_OPTIONS=allow_user_segv_handler=1 make check
|
||||
# make test-network # requires chutney
|
||||
#
|
||||
# 4. the tor binary is now instrumented with clang sanitizers,
|
||||
# and can be run just like a standard tor binary
|
||||
|
||||
# Compatibility:
|
||||
# This blacklist has been tested with clang 3.7's UndefinedBehaviorSanitizer
|
||||
# and AddressSanitizer on OS X 10.10 Yosemite, with all tests passing
|
||||
# on both x86_64 and i386 (using CC="clang -arch i386")
|
||||
# It has not been tested with ThreadSanitizer or MemorySanitizer
|
||||
# Success report and patches for other sanitizers or OSs are welcome
|
||||
|
||||
# ccache and make don't account for the sanitizer blacklist as a dependency
|
||||
# you might need to set CCACHE_DISABLE=1 and/or use make clean to work around this
|
||||
|
||||
# Configuration Flags:
|
||||
# -fno-sanitize-recover=all
|
||||
# causes clang to crash on undefined behavior, rather than printing
|
||||
# a warning and continuing (the AddressSanitizer always crashes)
|
||||
# -fno-omit-frame-pointer -fno-optimize-sibling-calls -fno-inline
|
||||
# make clang backtraces easier to read
|
||||
# --disable-gcc-hardening
|
||||
# disables warnings about the redefinition of _FORTIFY_SOURCE
|
||||
# (it conflicts with the sanitizers)
|
||||
|
||||
# Turning the sanitizers off for particular functions:
|
||||
# (Unfortunately, exempting functions doesn't work for the blacklisted
|
||||
# functions below, and we can't turn the code off because it's essential)
|
||||
#
|
||||
# #if defined(__has_feature)
|
||||
# #if __has_feature(address_sanitizer)
|
||||
# /* tell clang AddressSanitizer not to instrument this function */
|
||||
# #define NOASAN __attribute__((no_sanitize_address))
|
||||
# #define _CLANG_ASAN_
|
||||
# #else
|
||||
# #define NOASAN
|
||||
# #endif
|
||||
# #else
|
||||
# #define NOASAN
|
||||
# #endif
|
||||
#
|
||||
# /* Telling AddressSanitizer to not instrument a function */
|
||||
# void func(void) NOASAN;
|
||||
#
|
||||
# /* Including or excluding sections of code */
|
||||
# #ifdef _CLANG_ASAN_
|
||||
# /* code that only runs under address sanitizer */
|
||||
# #else
|
||||
# /* code that doesn't run under address sanitizer */
|
||||
# #endif
|
||||
|
||||
# Blacklist Entries:
|
||||
|
||||
# test-memwipe.c checks if a freed buffer was properly wiped
|
||||
fun:vmemeq
|
||||
fun:check_a_buffer
|
||||
|
||||
# we need to allow the tor bt handler to catch SIGSEGV
|
||||
# otherwise address sanitizer munges the expected output and the test fails
|
||||
# we can do this by setting an environmental variable
|
||||
# See https://code.google.com/p/address-sanitizer/wiki/Flags
|
||||
# ASAN_OPTIONS=allow_user_segv_handler=1
|
||||
|
||||
# test_bt_cl.c stores to a NULL pointer to trigger a crash
|
||||
fun:crash
|
||||
|
||||
# curve25519-donna.c left-shifts 1 bits into and past the sign bit of signed
|
||||
# integers. Until #13538 is resolved, we exempt functions that do left shifts.
|
||||
# Note that x86_64 uses curve25519-donna-c64.c instead of curve25519-donna.c
|
||||
fun:freduce_coefficients
|
||||
fun:freduce_degree
|
||||
fun:s32_eq
|
||||
fun:fcontract
|
@@ -1,158 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Future imports for Python 2.7, mandatory in 3.0
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import socket
|
||||
import struct
|
||||
import sys
|
||||
|
||||
def socks4AResolveRequest(hostname):
|
||||
version = 4
|
||||
command = 0xF0
|
||||
port = 0
|
||||
addr = 0x0000001
|
||||
username = ""
|
||||
reqheader = struct.pack("!BBHL", version, command, port, addr)
|
||||
return "%s%s\x00%s\x00"%(reqheader,username,hostname)
|
||||
|
||||
def socks4AParseResponse(response):
|
||||
RESPONSE_LEN = 8
|
||||
if len(response) < RESPONSE_LEN:
|
||||
return None
|
||||
assert len(response) >= RESPONSE_LEN
|
||||
version,status,port = struct.unpack("!BBH",response[:4])
|
||||
assert version == 0
|
||||
assert port == 0
|
||||
if status == 90:
|
||||
return "%d.%d.%d.%d"%tuple(map(ord, response[4:]))
|
||||
else:
|
||||
return "ERROR (status %d)"%status
|
||||
|
||||
def socks5Hello():
|
||||
return "\x05\x01\x00"
|
||||
def socks5ParseHello(response):
|
||||
if response != "\x05\x00":
|
||||
raise ValueError("Bizarre socks5 response")
|
||||
def socks5ResolveRequest(hostname, atype=0x03, command=0xF0):
|
||||
version = 5
|
||||
rsv = 0
|
||||
port = 0
|
||||
reqheader = struct.pack("!BBBB",version, command, rsv, atype)
|
||||
if atype == 0x03:
|
||||
reqheader += struct.pack("!B", len(hostname))
|
||||
portstr = struct.pack("!H",port)
|
||||
return "%s%s%s"%(reqheader,hostname,portstr)
|
||||
|
||||
def socks5ParseResponse(r):
|
||||
if len(r)<8:
|
||||
return None
|
||||
version, reply, rsv, atype = struct.unpack("!BBBB",r[:4])
|
||||
assert version==5
|
||||
assert rsv==0
|
||||
if reply != 0x00:
|
||||
return "ERROR",reply
|
||||
assert atype in (0x01,0x03,0x04)
|
||||
if atype != 0x03:
|
||||
expected_len = 4 + ({1:4,4:16}[atype]) + 2
|
||||
if len(r) < expected_len:
|
||||
return None
|
||||
elif len(r) > expected_len:
|
||||
raise ValueError("Overlong socks5 reply!")
|
||||
addr = r[4:-2]
|
||||
if atype == 0x01:
|
||||
return "%d.%d.%d.%d"%tuple(map(ord,addr))
|
||||
else:
|
||||
# not really the right way to format IPv6
|
||||
return "IPv6: %s"%(":".join([hex(ord(c)) for c in addr]))
|
||||
else:
|
||||
hlen, = struct.unpack("!B", r[4])
|
||||
expected_len = 5 + hlen + 2
|
||||
if len(r) < expected_len:
|
||||
return None
|
||||
return r[5:-2]
|
||||
|
||||
def socks5ResolvePTRRequest(hostname):
|
||||
return socks5ResolveRequest(socket.inet_aton(hostname),
|
||||
atype=1, command = 0xF1)
|
||||
|
||||
|
||||
def parseHostAndPort(h):
|
||||
host, port = "localhost", 9050
|
||||
if ":" in h:
|
||||
i = h.index(":")
|
||||
host = h[:i]
|
||||
try:
|
||||
port = int(h[i+1:])
|
||||
except ValueError:
|
||||
print("Bad hostname %r"%h)
|
||||
sys.exit(1)
|
||||
elif h:
|
||||
try:
|
||||
port = int(h)
|
||||
except ValueError:
|
||||
host = h
|
||||
|
||||
return host, port
|
||||
|
||||
def resolve(hostname, sockshost, socksport, socksver=4, reverse=0):
|
||||
assert socksver in (4,5)
|
||||
if socksver == 4:
|
||||
fmt = socks4AResolveRequest
|
||||
parse = socks4AParseResponse
|
||||
elif not reverse:
|
||||
fmt = socks5ResolveRequest
|
||||
parse = socks5ParseResponse
|
||||
else:
|
||||
fmt = socks5ResolvePTRRequest
|
||||
parse = socks5ParseResponse
|
||||
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
s.connect((sockshost,socksport))
|
||||
if socksver == 5:
|
||||
s.send(socks5Hello())
|
||||
socks5ParseHello(s.recv(2))
|
||||
s.send(fmt(hostname))
|
||||
answer = s.recv(6)
|
||||
result = parse(answer)
|
||||
while result is None:
|
||||
more = s.recv(1)
|
||||
if not more:
|
||||
return None
|
||||
answer += more
|
||||
result = parse(answer)
|
||||
print("Got answer",result)
|
||||
m = s.recv(1)
|
||||
if m:
|
||||
print("Got extra data too: %r"%m)
|
||||
return result
|
||||
|
||||
if __name__ == '__main__':
|
||||
if len(sys.argv) not in (2,3,4):
|
||||
print("Syntax: resolve.py [-4|-5] hostname [sockshost:socksport]")
|
||||
sys.exit(0)
|
||||
socksver = 4
|
||||
reverse = 0
|
||||
while sys.argv[1][0] == '-':
|
||||
if sys.argv[1] in ("-4", "-5"):
|
||||
socksver = int(sys.argv[1][1])
|
||||
del sys.argv[1]
|
||||
elif sys.argv[1] == '-x':
|
||||
reverse = 1
|
||||
del sys.argv[1]
|
||||
elif sys.argv[1] == '--':
|
||||
break
|
||||
|
||||
if len(sys.argv) >= 4:
|
||||
print("Syntax: resolve.py [-x] [-4|-5] hostname [sockshost:socksport]")
|
||||
sys.exit(0)
|
||||
if len(sys.argv) == 3:
|
||||
sh,sp = parseHostAndPort(sys.argv[2])
|
||||
else:
|
||||
sh,sp = parseHostAndPort("")
|
||||
|
||||
if reverse and socksver == 4:
|
||||
socksver = 5
|
||||
resolve(sys.argv[1], sh, sp, socksver, reverse)
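
# Example invocation (illustrative note, not part of the original script;
# assumes a local Tor client listening on its default SOCKS port):
#
#   ./resolve.py -5 www.torproject.org 127.0.0.1:9050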
|
@ -1,61 +0,0 @@
|
||||
#! /bin/sh

# This script used to call (the now deprecated) tsocks as a fallback in case
# torsocks wasn't installed.
# Now, it's just a backwards compatible shim around torsocks with reasonable
# behavior if -v/--verbose or -h/--help arguments are passed.
#
# Copyright (c) 2004, 2006, 2009 Peter Palfrader
# Modified by Jacob Appelbaum <jacob@appelbaum.net> April 16th 2006
# Stripped of all the tsocks cruft by ugh on February 22nd 2012
# May be distributed under the same terms as Tor itself


compat() {
	echo "torify is now just a wrapper around torsocks(1) for backwards compatibility."
}

usage() {
	compat
	echo "Usage: $0 [-hv] <command> [<options>...]"
}

case $# in 0)
	usage >&2
	exit 1
esac

case $# in 1)
	case $1 in -h|--help)
		usage
		exit 0
	esac
esac

case $1 in -v|--verbose)
	compat >&2
	shift
esac

# taken from Debian's Developer's Reference, 6.4
pathfind() {
	OLDIFS="$IFS"
	IFS=:
	for p in $PATH; do
		if [ -x "$p/$*" ]; then
			IFS="$OLDIFS"
			return 0
		fi
	done
	IFS="$OLDIFS"
	return 1
}

if pathfind torsocks; then
	exec torsocks "$@"
	echo "$0: Failed to exec torsocks $*" >&2
	exit 1
else
	echo "$0: torsocks not found in your PATH. Perhaps it isn't installed? (tsocks is no longer supported, for security reasons.)" >&2
fi
|
@ -1,115 +0,0 @@
|
||||
#!/usr/bin/ruby
|
||||
|
||||
# add-tor - Add a tor fingerprint line to the approved-routers file
|
||||
#
|
||||
# Tor's approved-routers file is expected to be versioned using RCS.
|
||||
# This script checks for uncommitted changes, does a checkout of the
|
||||
# file, adds the new fingerprint with a comment stating the server's
|
||||
# operator, and commits the file to RCS again (using -u so that the
|
||||
# working copy is not removed).
|
||||
#
|
||||
# Operator and fingerprint line are read from stdin.
|
||||
#
|
||||
# Before adding a fingerprint line, approved-routers is checked for
|
||||
# rough syntactical correctness. This script also checks that the
|
||||
# nickname and fingerprint to be added do not already exist in the
|
||||
# binding list.
|
||||
|
||||
|
||||
# Copyright (c) 2006 by Peter Palfrader
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
BINDING = '/etc/tor/approved-routers'
|
||||
|
||||
def mysys(cmd)
|
||||
unless system(cmd)
|
||||
STDERR.puts "ERROR: #{cmd} failed"
|
||||
exit 1
|
||||
end
|
||||
end
|
||||
|
||||
def check_nick(n)
|
||||
n =~ /^[a-zA-Z0-9]+$/
|
||||
end
|
||||
|
||||
def check_fpr(fpr)
|
||||
fpr =~ /^([0-9A-F]{4} ){9}[0-9A-F]{4}$/
|
||||
end
|
||||
|
||||
def parse_fprline(fprline)
|
||||
n = fprline[0 ... fprline.index(' ')]
|
||||
f = fprline[fprline.index(' ') + 1 .. -1 ]
|
||||
unless check_nick(n) and check_fpr(f)
|
||||
STDERR.puts "Invalid fpr syntax '#{fprline}'"
|
||||
exit 1
|
||||
end
|
||||
[n, f]
|
||||
end
|
||||
|
||||
|
||||
|
||||
unless system("rcsdiff -q -u #{BINDING}")
|
||||
STDERR.puts "Uncommitted changes in #{BINDING}. Aborting."
|
||||
exit 1
|
||||
end
|
||||
|
||||
puts "Checking out #{BINDING}..."
|
||||
mysys("co -l #{BINDING}")
|
||||
|
||||
print "Operator: "
|
||||
@operator = readline.chop
|
||||
unless @operator.index('@')
|
||||
STDERR.puts "ERROR: No @ found"
|
||||
exit 1
|
||||
end
|
||||
|
||||
print "FPR Line: "
|
||||
@fprline = readline.chop
|
||||
(@nickname, @fpr) = parse_fprline(@fprline)
|
||||
|
||||
binding = File.new(BINDING, "r+")
|
||||
binding.readlines.each do |line|
|
||||
line.chop!
|
||||
next if line[0..0] == "#"
|
||||
(n,f) = parse_fprline(line)
|
||||
if (n == @nickname)
|
||||
STDERR.puts
|
||||
STDERR.puts "ERROR: Nickname #{n} already exists in #{BINDING} (fpr: #{f})"
|
||||
exit 1
|
||||
end
|
||||
if (f == @fpr)
|
||||
STDERR.puts
|
||||
STDERR.puts "ERROR: Fpr #{f} already exists in #{BINDING} (nickname: #{n})"
|
||||
exit 1
|
||||
end
|
||||
end
|
||||
|
||||
puts
|
||||
puts '| # ' + @operator
|
||||
puts '| ' + @fprline
|
||||
puts
|
||||
|
||||
binding.puts '# '+@operator
|
||||
binding.puts @fprline
|
||||
binding.close
|
||||
|
||||
puts "Committing #{BINDING}..."
|
||||
mysys("ci -u -m'Add #{@nickname}' #{BINDING}")
|
@ -1,86 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# nagios-check-tor-authority-cert - check certificate expiry time
|
||||
|
||||
# A nagios check for Tor v3 directory authorities:
|
||||
# - Checks the current certificate expiry time
|
||||
#
|
||||
# Usage: nagios-check-tor-authority-cert <authority identity fingerprint>
|
||||
# e.g.: nagios-check-tor-authority-cert A9AC67E64B200BBF2FA26DF194AC0469E2A948C6
|
||||
|
||||
# Copyright (c) 2008 Peter Palfrader <peter@palfrader.org>
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
set -e
|
||||
set -u
|
||||
|
||||
if [ -z "${1:-}" ]; then
|
||||
echo "Usage: $0 <authority identity fingerprint>" 2>&1
|
||||
exit 3
|
||||
fi
|
||||
|
||||
identity="$1"
|
||||
|
||||
DIRSERVERS=""
|
||||
DIRSERVERS="$DIRSERVERS 86.59.21.38:80" # tor26
|
||||
DIRSERVERS="$DIRSERVERS 128.31.0.34:9031" # moria1
|
||||
DIRSERVERS="$DIRSERVERS 216.224.124.114:9030" # ides
|
||||
DIRSERVERS="$DIRSERVERS 80.190.246.100:80" # gabelmoo
|
||||
#DIRSERVERS="$DIRSERVERS 140.247.60.64:80" # lefkada
|
||||
DIRSERVERS="$DIRSERVERS 194.109.206.212:80" # dizum
|
||||
DIRSERVERS="$DIRSERVERS 213.73.91.31:80" # dannenberg
|
||||
|
||||
TMPFILE=$(mktemp)
|
||||
trap 'rm -f "$TMPFILE"' 0
|
||||
|
||||
for dirserver in $DIRSERVERS; do
|
||||
if wget -q -O "$TMPFILE" "http://$dirserver/tor/keys/fp/$identity"
|
||||
then
|
||||
break
|
||||
else
|
||||
cat /dev/null > "$TMPFILE"
|
||||
continue
|
||||
fi
|
||||
done
|
||||
|
||||
if ! [ -s "$TMPFILE" ] ; then
|
||||
echo "UNKNOWN: Downloading certificate for $identity failed."
|
||||
exit 3
|
||||
fi
|
||||
|
||||
expirydate="$(awk '$1=="dir-key-expires" {printf "%s %s", $2, $3}' < "$TMPFILE")"
|
||||
expiryunix=$(TZ=UTC date -d "$expirydate" +%s)
|
||||
now=$(date +%s)
|
||||
|
||||
if [ "$now" -ge "$expiryunix" ]; then
|
||||
echo "CRITICAL: Certificate expired $expirydate (authority $identity)."
|
||||
exit 2
|
||||
elif [ "$(( now + 7*24*60*60 ))" -ge "$expiryunix" ]; then
|
||||
echo "CRITICAL: Certificate expires $expirydate (authority $identity)."
|
||||
exit 2
|
||||
elif [ "$(( now + 30*24*60*60 ))" -ge "$expiryunix" ]; then
|
||||
echo "WARNING: Certificate expires $expirydate (authority $identity)."
|
||||
exit 1
|
||||
else
|
||||
echo "OK: Certificate expires $expirydate (authority $identity)."
|
||||
exit 0
|
||||
fi
|
@ -1,11 +0,0 @@

EXTRA_DIST+= \
	contrib/README \
	contrib/client-tools/torify \
	contrib/operator-tools/tor-exit-notice.html \
	contrib/or-tools/exitlist \
	contrib/win32build/tor-mingw.nsi.in \
	contrib/win32build/tor.ico \
	contrib/win32build/tor.nsi.in

bin_SCRIPTS+= contrib/client-tools/torify
@ -1,348 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en-US">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>This is a Tor Exit Router</title>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<link rel="icon" type="image/png" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAMAAABEpIrGAAAAAXNSR0IArs4c6QAAAMxQTFRFAAAAcK9AAAAAaK9IAAAAaq9FAAAAaK9EAAAAaa9DAAAAaK9FAAAAaa9EAAAAaK9EAgICZ7FEAgICaLBDAgICZ7BEAQEBaLBEAQEBZ7BEAQEBaLBDAQEBZ7BEHDATAQEB9fje5unQ19nCx8q1uLqnqauZmpuLaLBEiox9e31wbG1iSHkvfkeYXV5UdkOPbj6FZzp8TU5GXzZyVzFpPj84Ty1fRyhWQCRNLy8qLykwJyQmMBs6ICAdKBcwIBMnGA4dEBAPEQoUCQUKAQEBYGuyKwAAACB0Uk5TABAQICAwMEBAUFBgYHBwgICPj5+fr6+/v8/P39/v7+9rEnzIAAABbUlEQVR42n2SbVuCMBSG8TXzpUzN1DTOBoyNmIZOpUJS9///U0f7EFzIno+77z3b2TXrZmo1y5i7oZl3ppXOsFKKHzuL3uu0nDeeF1PfwK3hwjfyiu/7C9MM9yg0ynG7/uT7PcP+vn57f7EMqUduPG+aDA0ibRl4VUNw6Bv4WNJ4p0sr2rYijKWb86B6e/uRA/U8sVke7HaRN+cxBZBEkM8wxJLCgLYkQCIGzGXbMPwoGDMJ4CkHqKPIIbwY+at2EwCBDnCXsa8Qs5/nC7BbIPfcAKRaobA8515UExIh5wwrpFyHmNyDtRIEQJSDkuTqKuzHWUEBFkTIPSaY2l+FSXbIBAWPA9BAuDL5E0ZWJjYVjiDYTwKi9PWI9CErjIQTCAqUg3D0VVjpelZoHWnEObguCKH1EoXviZXLIHbihLgMBNc/yLc2FuQyioMkAgkq0jvkul38sEedRAFL9Gm5Tm9+zPrI1pekp1n3f/UXYAE8WMMDnnoAAAAASUVORK5CYII=">
|
||||
<!--
|
||||
|
||||
This notice is intended to be placed on a virtual host for a domain that
|
||||
your Tor exit node IP reverse resolves to so that people who may be about
|
||||
to file an abuse complaint would check it first before bothering you or
|
||||
your ISP. Ex:
|
||||
https://tor-exit.yourdomain.org or https://tor-readme.yourdomain.org.
|
||||
|
||||
This type of setup has proven very effective at reducing abuse complaints
|
||||
for exit node operators.
|
||||
|
||||
There are a few places in this document that you may want to customize.
|
||||
They are marked with FIXME.
|
||||
|
||||
-->
|
||||
<style>
|
||||
:root{
|
||||
--background-color: white;
|
||||
--text-color: rgb(33, 37, 41);
|
||||
--link-color: rgb(116, 42, 152);
|
||||
}
|
||||
@media (prefers-color-scheme: dark){
|
||||
:root{
|
||||
--background-color: rgb(34, 34, 34);
|
||||
--text-color: rgb(221, 221, 221);
|
||||
--link-color: rgb(219, 142, 255);
|
||||
}
|
||||
}
|
||||
*{
|
||||
font-family: Arial;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
html{
|
||||
background: var(--background-color);
|
||||
}
|
||||
body{
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
padding-left: 5vw;
|
||||
padding-right: 5vw;
|
||||
max-width: 1000px;
|
||||
}
|
||||
h1, p{
|
||||
color: var(--text-color);
|
||||
}
|
||||
h1{
|
||||
font-size: 55px;
|
||||
text-align: center;
|
||||
}
|
||||
p, a{
|
||||
font-size: 20px;
|
||||
}
|
||||
a{
|
||||
color: var(--link-color);
|
||||
text-decoration: none;
|
||||
}
|
||||
a:hover{
|
||||
filter: brightness(.8);
|
||||
text-decoration: underline;
|
||||
}
|
||||
.links{
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
justify-content: space-evenly;
|
||||
}
|
||||
.links > a{
|
||||
margin: 10px;
|
||||
white-space: nowrap;
|
||||
}
|
||||
</style>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
<main>
|
||||
<h1>This is a Tor Exit Router</h1>
|
||||
<p style="text-align:center">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="500" viewBox="0 0 274.3 159.9" style="width:100%;max-width:500px">
|
||||
<desc></desc>
|
||||
<defs>
|
||||
<style>
|
||||
.a{fill:none}.b,.e{fill:#895ba5}.b{stroke:#895ba5}.j,.l,.m,.o,.b,.d{stroke-miterlimit:10}.l,.b{stroke-width:.75px}.k,.c,.f{fill:#24093b}.a,.c,.f{stroke:#24093b;stroke-linecap:round;stroke-linejoin:round}.m,.o,.c,.d{stroke-width:.5px}.i,.j,.d{fill:#6fc8b7}.j,.d{stroke:#6fc8b7}.l,.m,.g{fill:#fed916}.h{fill:#fff}.a{stroke-width:1.6px}.j{stroke-width:.68px}.l,.m{stroke:#fed916}.n,.o{fill:#cf63a6}.o{stroke:#cf63a6}
|
||||
</style>
|
||||
</defs>
|
||||
<path d="M153.6 27.5s6.2 13.1 18.1 3c-8-5.7-10.9-7.6-10.9-7.6z" class="c"/>
|
||||
<path d="M212.9 96.2c3.2 6.5 5.2 13.8 9 20 2.3 3.8 4 4.5 8.3 3.3 8.2-2.3 17.4-6.3 25.6-9.7l-1.2-7.6a7.5 7.5 0 0 1-3.2 1.2c-2 0-4-1-5.8-1.6-1.4-.4-3.9.4-4.6.8-3.5 1.9-4.8 4-7 7-.6-7.8-1.2-15.6-1.5-23.4-6.6 3.4-12.9 7-19.6 10z" class="d"/>
|
||||
<path d="M217.2 108.6c.6 1.3 1.7 2 1.5 2.8 1.7 1.9 1.8 5.2 3 6.6 2.3-4 5.5-7.4 7.9-11.2.5-1.5 1.8-2.3 2.4-3.3.7-1.8 2.8-4.4 2.4-7.1.6-3.7.4-7.4 1-11-4-5-19.5 18.8-19.5 19.2-.4.5 1.4 2.6 1.3 4zm3.4 6.8c-.4-.3.3-.5 0 0z" class="e"/>
|
||||
<path d="M221 115.4c-1.2-3.3-.3-9.3 1.4-11.6 2.3-3 5.3-3.6 7.4-5.5 1.7-1.4 3.3-4.8 3.6-5.9 6.3-23.1-13.8 5.3-17.2 11.2 1.6 4 3.1 8 4.9 11.8z" class="f"/>
|
||||
<path d="M220.6 97c2.5-2.7 5.3-5.1 7.7-8-5-.4-10-.8-14.8-1.5-4.1 5.4-8 11.1-11.9 16.7-8 13.6-14.5 28.2-21.6 42.3a9.1 9.1 0 0 0 5 2c.3.1.8.4 1.2.2.4-.3.4 0 .9-.6.4-.6 0 .4.8-.9 3-4.7 5.4-10.1 9-15.1.6-.9 2.7-2.8 4-4.5a16.8 16.8 0 0 0 2.3-3.2c2.7-4.9 5.5-11.3 9.2-16.8a119.3 119.3 0 0 1 8.2-10.6z" class="d"/>
|
||||
<path d="M233.4 75.7c0-.1-.2-.1-.2 0v.2zm-3.6 2.3c-2.5 1.1-4.2 2.1-6.5 2.7-2.5 1-5.7.5-7.7 2.8-2.6 2.8-4 6.5-6.7 9.2-2.6 3.5-4.3 7.6-7.4 10.5 0 2.2 2.3 3.3 4 4 1.5.3 2.4 1.8 3.9 2.2 1.5.7 3.3.1 4.3-1.2 2-2 3.4-4.6 5.8-6.1 5.3-4 11.6-6.9 16.1-12 2.6-2.6 4.5-6.7 2.7-10.2-.8-1.4-1.8-3-3.4-3.3-2.1.2-1.1-.1-5.1 1.4z" class="e"/>
|
||||
<path d="m185.4 133 7 6.2c-2 3.9-4.2 7.6-6.5 11.2-1.8-2.2-3.6-4.3-5.3-6.6a82.4 82.4 0 0 0 4.8-10.9z" class="g"/>
|
||||
<path d="M182.5 141.4c-1.3 0-1.2 1.3-2.2 2.2a13.9 13.9 0 0 0-1.8 2.3c-.3-2.6 0-4.4-1.4-7-.3-.6-1.8-2.3-3-2.2-.6 0-2.4-.4-3.2.1a12.4 12.4 0 0 0-2.2 1.2c2.6 4 5.2 7.9 7.7 12a24 24 0 0 0 3 3.5l2.8 2.5a103.3 103.3 0 0 0 6-8.8 24.5 24.5 0 0 0-2.6-3.4c-1.1-1.3-1.7-2.5-3-2.4z" class="f"/>
|
||||
<path d="m168.3 138-.8.6c-.6 1-1 2 0 3 4 4.7 8.1 9.4 12.3 14 1.1 1.3 2 .5 3.3-.1-5-5.9-9.8-11.7-14.8-17.4z" class="e"/>
|
||||
<path d="m248 113 .1-10.8 12.6-4v10L248 113z" class="g"/>
|
||||
<path d="M257.4 109.2c1.3.7 2-.6 3.4-1a13.6 13.6 0 0 0 2.8-1.1c-.9 2.7-1.8 4.2-1.5 7.5 0 .7.7 3 1.8 3.7.4.3 1.8 1.7 2.5 1.8a9.4 9.4 0 0 0 2.3.2c-.4-5.3-.9-10.6-1.5-15.8a37.3 37.3 0 0 0-1-5l-1.3-4c-3.2 1.7-6.4 3-9.5 4.9a26.3 26.3 0 0 0 .3 4.7c.4 1.9.3 3.4 1.7 4.1z" class="f"/>
|
||||
<path d="M269 120.3c.3 0 .7.2.9 0a2.5 2.5 0 0 0 1.3-2.8c-1.4-6.8-2.9-13.7-4.5-20.4-.4-1.7-1.4-1.6-2.7-1.7l5 25z" class="e"/>
|
||||
<path d="m185.2 134.6 6.5 6.4m58-39.4.1 10.8" class="a"/>
|
||||
<path d="M188.5 86.7c.4.6.7 1.3.8 2 0 1.2-.4 2-1.4 2.3-2.8.6-5.8 0-8.1-1.7-1.6-1-3-2.4-2.8-4.5" class="d"/>
|
||||
<path d="M213.8 50.2a55.2 55.2 0 0 0-8.7 14.6 61.4 61.4 0 0 0-4.3 17.4 13.5 13.5 0 0 0-6.8-.7 14.2 14.2 0 0 0-5.5 2.2l-1 4.5-.7 3.2c4.6-.5 9.2.2 14-.2 2.2-.2 3-.5 4.6-3.1 3.6-6.4 7-14 11.2-20.2-1.8-5.8-1.5-11.7-2.8-17.7z" class="d"/>
|
||||
<path d="m186.7 91.3 1.4-8.5 6-1.1-1.4 9.7-6-.1z" class="b"/>
|
||||
<path d="M249 83.2a3.5 3.5 0 0 0 2.4-2.3 2.1 2.1 0 0 0-1-2.7c-.9-.3-3.4 1.4-1.8 1.8 1.6.4 2.2-2.2 1.9-3.6-.8-3-2.7-4-5.2-3a5.4 5.4 0 0 0-3.8 5.8c.8 4.5 4.2 4.8 7.6 4z" class="d"/>
|
||||
<path d="M235.1 82.3a33.1 33.1 0 0 1-.4-16.9c1.8-5.8 4.6-7.7 6.4-12.2a6.3 6.3 0 0 0 .2-2.6c.7-4.3-3.3-6.3-5.7-6.4A46.6 46.6 0 0 0 218 48c-4.5 1.9-5 4.2-7.1 7-2.1 3-1.5 6-1.3 8.2.5 5 2.3 7 3.8 11.1a84 84 0 0 1 4 15.4c5.9-2.8 12-4.5 17.8-7.4z" class="b"/>
|
||||
<path d="m234.4 58.2.2-.5a15.3 15.3 0 0 1 4.3.7l4 .8 1.9.3a6.3 6.3 0 0 0 3-.3c1.7-.5 3.6-.5 5.2-1.2m-35.8 26.8c2.8-2.9 6.5-4.4 9.9-6a67.1 67.1 0 0 1 6.4-2.8" class="a"/>
|
||||
<path fill="#6fc8b7" stroke="#6fc8b7" stroke-miterlimit="10" stroke-width=".6" d="M220.3 46.4c-1.4 1.5-5 6.6-.8 6.2 4.3-.4 7.4-4.4 10.5-7.6a77.4 77.4 0 0 0-9.7 1.4z"/>
|
||||
<path d="M238.3 48.2a2.8 2.8 0 0 0-5.2-1.6c-2.7 3.8-8.9 13.7-3.7 17 5.6 3.5 9.1-10.3 8.9-15.4z" class="f"/>
|
||||
<path d="M242.6 46c5.8 2.2 12.6 3.7 18.3 6.4 3.3 1.6 3.5 3.9 1.2 7.3-3.3 5-8.3 10.3-12 15.5-1.8-1-3.5-2-5.3-2.8 2.7-4.5 5.3-8.9 8.1-13.2-5.6-.4-10.5-.5-16.4-1.3-.6 0-3.9-1.1-3.6-3.4.2-1.3 0-3.7 1-5.3.3-.3.3-1.2 1.3-1.7 2.3-1.2 4.4-2.7 7.4-1.5z" class="d"/>
|
||||
<path d="m247.5 67.2 1.5.4 5 1.6c-1.1 2.3-2.6 4.6-4 7q-2.5-2-5.2-3.8" class="b"/>
|
||||
<path d="M224.6 37a117.1 117.1 0 0 0-5.9 10.2 65.3 65.3 0 0 1 13-2.9q.6-5.7 1.5-11.4l-8.6 4z" class="f"/>
|
||||
<path d="M218.3 29.7c2.1-6.6 7.7-10 13.8-10.6 4.2 1 7.3 2.6 7 7.6 0 .6 0 .6-.4 1.3-2.1 2.6-6 6-9 8.7-2.4 2.1-4 2.4-6.7 3h-.5c-3.5-2.6-6-4.3-4.2-10z" class="i"/>
|
||||
<path d="M233.6 29.7c1.2.2 3 .4 3.9 1.2.9.8.2 1.2.3 2.2a3.7 3.7 0 0 1 0 .8 3.5 3.5 0 0 1-.5 1 4.3 4.3 0 0 1-4.1 2.3c-1.6-.1-2-.5-2.2-2.1" class="i"/>
|
||||
<path d="M235.4 31.5a6.4 6.4 0 0 0 1-.7c3.5-6-3-3.8-1 .7z" class="j"/>
|
||||
<path d="M224.7 23c-2-4.7.8-8.1 4-11.3 1.6-1.5 5-3.5 7.3-3.2 2.3.2 3 1.5 2 3.8s-3.5 3.8-5.2 5.4c2.2-.2 6-1.8 7.3 1.4 1.4 4-2.4 6.4-5.5 6.8-3.1.3-8.1 1.2-9.9-2.9z" class="f"/>
|
||||
<path d="m233.7 27.9.6.9m1.9-1 .2.1m-2.6 1.9.7.2m-13.3-4.2c-2.1 1.4-4.3 2.7-6.4 4.2 1.4-.1 2.8.5 4.3.4l.6.1m-2.5 3c4.2.3 8.5-.8 12-3.1" class="a"/>
|
||||
<path d="M65.3 25.6c-1.9-2-.8-6 2-6.5.7-.2 1.7 0 .7.6-1.7 1.7-3.9 3.4-6.4 3.1a5.2 5.2 0 0 1-5-4c-.6-2.3.2-5 2-6.5 1.7-1.8 3.5-4 6.1-4 6.7-.1-1.8 5.7-.4 2.4.6-1.5 1.3-3 2.3-4.2 2.2-3.2 5.7-5.9 9.7-6 2.7 0 5.9 1.6 6.3 4.5 1.2 9.3-9.6 27-17.3 20.6Z" class="k"/>
|
||||
<path d="M11.9 82.2c1.3 1.3 2.3 1 4.5.6 2.1-.4 4.4-2.1 6.2-3.9 0 2.4-.8 5-1.3 7.1.5 1.7 2.5-.3 2.8-.8 1.1-2 2.4-3.7 3.9-5.4 1.6-1.4 4-1.2 5.4-2.5 1.9-1.8 2.2-2.8-.6-3.6-2.7-.7-2.8-3-7.2-.1-2.8 1.8-9.6 8.8-13.7 8.6z" class="e"/>
|
||||
<path d="M55.8 93.4c2.8 7 4.9 15 8.2 21.4 2 4 3.7 5 8 4.3 7.7-1.2 16-4 23.8-4.9l-.2-8.2c-1 .2-2.2.7-3.1.4a23.6 23.6 0 0 1-5-3 5.7 5.7 0 0 0-4.1-.3c-3.5 1.4-4.8 3.5-7.2 6.3l.6-24.6c-7 3-14 5.8-21 8.6z" class="b"/>
|
||||
<path d="M58.3 99.4c1.3 4.2.8 7 4.8 14.8.4 1 .8 1.4 2.1-.2l10.9-14c1-1.1 1-1.2 1-2 .2-6.5 1.1-10.7 2-16.5-6.7 6.3-13.9 12-20.8 18z" class="k"/>
|
||||
<path d="M63.5 95c2.8-2.6 5.6-5 8.4-7.4a140.2 140.2 0 0 1-15.8-3.7c-4 5.9-7.5 11.9-11.6 17.8-9.9 14-20.3 28-30.2 42A13.4 13.4 0 0 0 20 146c.4 0 .9.3 1.4.1.5-.2.5 0 1.1-.6.6-.6 0 .3 1.2-1 4-4.6 8-10 13-14.6a51.6 51.6 0 0 0 4.8-4 12.3 12.3 0 0 0 2.8-3c3.2-5 7-11.3 10.9-17.1a134.4 134.4 0 0 1 8.4-10.9z" class="b"/>
|
||||
<path d="M63.3 78.6c-5-.4-7 4.8-9.5 7.8-2.7 4-5.1 8-7.6 11.9-4.6 4.5 2.9 6.5 5.3 8.9 5 2.7 6.6-5.2 10.3-7.1 3.9-3.9 9.2-5.4 14.3-6.6 3.3-.8 5.1-3.2 6.5-6.5 1.4-4 .8-9.3-2.5-12.1a46.7 46.7 0 0 0-16.8 3.7z" class="k"/>
|
||||
<path d="m24.3 129.5 5.6 7.2-9.1 11.3c-1.9-2.4-3.7-4.8-5.6-7z" class="l"/>
|
||||
<path d="M17.9 138.4c-1.6 0-1.8 1.4-3 2.4a15.7 15.7 0 0 0-2.2 2.4c-.4-2.9-.3-4.7-2-7.2-.3-.6-1.7-2.3-3.1-2.3-.5 0-2.4-.4-3.2 0a15.7 15.7 0 0 0-2.4 1.1c2.7 4.2 5.3 8.4 8.2 12.5a27.5 27.5 0 0 0 3.3 3.6c1 1 2 1.8 3 2.6 2.3-3 4.9-5.8 7.1-9a21.8 21.8 0 0 0-2.3-3.5c-1.2-1.4-1.9-2.5-3.4-2.5z" class="f"/>
|
||||
<path d="m1.6 135-.8.4c-.6 1.3-.9 2 0 3.1 4.2 4.9 8.6 10 13 14.6 1.2 1.2 2.1.5 3.5 0 0 0 0-.1.1 0L1.6 135Z" class="m"/>
|
||||
<path d="m88.7 115.2 1-11 12-.4-1.5 10.3-11.5 1.1z" class="l"/>
|
||||
<path d="M97.2 114c1 1.2 1.8.2 3.1.3a9.6 9.6 0 0 0 2.7-.5c-1.2 2.8-2.2 4.4-2.5 7.8-.1.7 0 3.4 1 4.4.3.3 1.3 2 2 2.2a10 10 0 0 0 2.2.6l1.1-16.8a43.8 43.8 0 0 0-.3-5.6l-.6-4.4c-3.1 1-6.3 1.4-9.4 2.5a33.5 33.5 0 0 0-.2 4.7c0 2.1-.1 3.6.9 4.8z" class="f"/>
|
||||
<path d="M106 129c.2 0 .5.3.8.1 1-.7 1.7-1.1 1.6-2.7-.3-7.2-.5-14.9-1-22-.1-2-1-2-2.3-2.6h-.1l1 27.2z" class="m"/>
|
||||
<path d="m22.7 131.3 6.1 7.1M91.1 104l-.8 11.1" class="a"/>
|
||||
<path d="M59.9 50.2c-2.6 2.1-4.8 3.6-6.9 6.2a53.5 53.5 0 0 0-8.2 15.3c-2.1-.3-3.5-.8-5.8-1-2.7-.2-5.4 0-8 .6l.5 6.6v2.6c3.9-.3 9.4 1.6 12.5 2.5 3.1 1 3.8 1 5.9-1.2 5.6-5.5 9.7-12 14.9-17.8-1.8-4.8-3.4-8.8-5-13.8Z" class="b"/>
|
||||
<path d="M79 75.3a17 17 0 0 1-1.6-10.9c1-5 3.2-8.1 4.4-12.3a7 7 0 0 0 .1-2.5c.3-4-3.8-5-5.9-4.6-5 .8-10.7 3-14.7 4-3.9.8-4.3 2.6-6.6 4.8-2.3 2.2-1.7 4.8-1.7 6.8 0 4.3 1.8 6.8 2.6 10.3 1 4 1.2 7.2 1.4 11.3 7.6-2 14.8-3.5 22.1-6.9Z" class="m"/>
|
||||
<path d="M63.2 48.1c-1.7 1.7-3.7 4.7-.2 4.8 3.4.1 6.3-3.3 8.7-6.4L63.2 48z" class="b"/>
|
||||
<path d="M81.3 47.7c-.4-2.8-5-2.9-6.6 0-2.6 4.3-7.6 13-2 15.5 6 2.7 9.6-9.9 8.6-15.5z" class="f"/>
|
||||
<path d="m66.8 37.6-4 11c3.8-1.2 7.6-1.8 11.4-3.2l1.5-9.6-9 1.8z" class="k"/>
|
||||
<path d="m30 80.1-.3-8.1a62.1 62.1 0 0 1 9.4-1.5 6.5 6.5 0 0 1 .9 4 19.4 19.4 0 0 1-2.6 6.3 55.3 55.3 0 0 0-7.3-.7z" class="f"/>
|
||||
<path d="M84.2 52.5c2.1 6 3.9 12.3 6.1 18.2 2.5 0 5.5-.6 7.8-.5a54.5 54.5 0 0 1 14.2 1.8 1.6 1.6 0 0 1 0 3c-7 2.4-15.4 4.2-22.3 5.8a6.5 6.5 0 0 1-5.7-1.5 43 43 0 0 1-10-19c-1-3.8 0-8.7 2.3-12 .5-.8 1.6-.7 2.4-.8h.1c2.2 1.2 4 2.1 5 5z" class="b"/>
|
||||
<path d="M95.6 70.1c.8.9 3.6 8 10.2 6.7 6.6-1.3 6.9-10.7-10.2-6.7z" class="c"/>
|
||||
<path d="M56 81.7c3 .4 6 1.1 9 .3a42.4 42.4 0 0 0 15.1-7.2" class="a"/>
|
||||
<path d="M60.7 34.3c2.2-5.9 7.3-8.1 12.4-8.7a9.4 9.4 0 0 1 3.2 1.7 5.7 5.7 0 0 1 2.5 3.7c0 .6-.4.8-.6 1.1l-5.3 6.7a32.9 32.9 0 0 1-5.1 3.2 12.8 12.8 0 0 1-3.2 1h-.4c-2.6-2.5-5.5-3.6-3.5-8.7z" class="e"/>
|
||||
<path d="M75.6 32.6c1.3.2 2.7.6 3.7 1.5.3.3.2.7.1 1v1.1c0 .8-.6 1.3-1 1.9a4 4 0 0 1-4.4.8c-.5-.2-.7-.8-.8-1.3l-.1-.4" class="e"/>
|
||||
<path d="m62.9 31.2-1.6 1.5-1.8 1.5c0 .2-.5.4-.5.5l3.1.4.6.2" class="a"/>
|
||||
<path d="M79.7 28.6c-4.6-1.5-11-5.8-15.1-4.9-.6.7-.6 1.7-1.2 2.3-.4 1-.2 2.3.8 2.7 3.8 0 12.2 4.9 14.6 4.3.5-2.4 1.4-1.7.9-4.4z" class="n"/>
|
||||
<path d="M83.4 29.3c-2.1 1.5-5.4 2-7.5.2-2-2-.9-6 2-6.5.7-.3 1.8 0 .6.7-1.7 1.8-4 3.5-6.7 3A5.3 5.3 0 0 1 67 22c5-6 11.8-17.7 18.8-17.8 2.9-.4 6.3 1 7 4 .9 3.5-1.5 7.3-5 7.7-4-.3 2.9-4 4.1-4 2.2-.1 4 1.4 4.8 3.3 1.3 3.4-.3 7.1-2.5 9.7a13 13 0 0 1-7.7 4.5c-2.2 0-4.4-2.3-3.3-4.5 2.2-1.2 4.6.5 1.3 3.7l-1 .6z" class="k"/>
|
||||
<path d="M63.9 27.6c5.6 2.5 10.4 4.3 15.4 6.4" class="a"/>
|
||||
<path fill="#fff" stroke="#24093b" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.6" d="M14.5 84.1c1.4-1.7 3.3-2.7 5-3.9 1-.7 1.8-1.6 2.8-2.2m29.4 32.6a26 26 0 0 0 7.2-7.2l3.5-3"/>
|
||||
<path d="M65.7 34.6c-.2 2-2 4.8 1 5.7M37.4 80.8c4.4 0 8.8 6.8 12.4-.2l4.6-3.3v0" class="a"/>
|
||||
<path d="M155.4 28.1c-.9 4.1-3.8 7.2-7.5 6.6-3.7-.6-5.4-4.2-4.5-8.3 1-4.2 4-7.1 7.8-6.5 3.8.6 5.2 4.2 4.3 8.3z" class="k"/>
|
||||
<path d="M142.8 36.6c1.2-2.9 2.6-4.2 5.2-4.8" class="a"/>
|
||||
<path d="M136.3 93.5c2.3 7.6 3.4 16 6.8 23 2.1 4.3 3.6 5.4 8.2 4.8 8.6-1 17.9-4.6 26.3-7.8l-1.1-8a7.9 7.9 0 0 1-3.4 1.2c-2 0-4-1.1-5.8-1.7-1.3-.6-3.7 0-4.5.4-3.4 1.7-5 3.7-7.3 6.4-.4-8-.3-16-.2-24-6.4 1.8-12.7 3.7-19 5.7z" class="o"/>
|
||||
<path d="M138 100c1.5 5.7 2.8 10.7 4.6 16.3.5 1.5.8.8 1.3.3a81 81 0 0 0 11-14.2c1-1.3.5-2.6.8-4.3l1.8-13-19.4 14.8Z" class="g"/>
|
||||
<path d="M142.3 115.6c-1.1-3.5.1-9.5 2-11.7 2.4-3 5.2-2.8 7.5-4.4 1.8-1.1 3.4-4.1 3.9-5.1 1.8-4.5 1.6-7 2-11.3-8.6 8.4-16.2 15.5-19 19.1 1 4.5 2.1 9.1 3.6 13.4z" class="f"/>
|
||||
<path fill="#cf63a6" stroke="#cf63a6" stroke-miterlimit="10" d="m143.2 96 8-6.4c-4.6-1.7-9-3.6-13.5-5.4-4 4.9-7.7 10-11.5 15l-1 1.4c-10.4 14.8-21 29.6-31.1 44.7a11 11 0 0 0 5.5 2.1c.4 0 .9.4 1.4.1.4-.2.5 0 1-.6.6-.7 0 .3 1.2-1.2 4-5 8.2-10.7 13.2-15.7.9-.8 3.4-2.4 5-4.1 1-.9 2-1.9 2.7-3 3.2-4.9 7-11.2 10.8-16.9 2.6-3.5 5.3-6.8 8.3-10z"/>
|
||||
<path d="M156.7 82.7c-.4 5.6-.4 6.6-3.8 11.1-1 1.1-2.2 2-3.6 2.7-2.3 1-5.5 2.6-8 3.6l-5.3 5.3c-.9 1.2-.6 1.6-3.7 1-3.8-1-8-3.8-8.2-5 0-.8 1.2-2 1.6-2.6 5-6.5 8.2-10.5 13-17 5.5.2 12.5 1 18 1z" class="g"/>
|
||||
<path d="m104.5 129.7 5.1 7.7-9.2 12-5.4-7 9.5-12.7z" class="h"/>
|
||||
<path d="M97.6 139.7c-1.5 0-1.8 1.5-3 2.5a16.3 16.3 0 0 0-2.1 2.5c-.3-2.9-.2-4.6-1.8-7.3-.3-.6-1.8-2.2-3-2.2-.6 0-2.4-.5-3 0a13.6 13.6 0 0 0-2.3 1c2.6 4.1 5.1 8.4 7.8 12.5a28.8 28.8 0 0 0 3 3.6l3 2.7c2.3-3 4.9-6 7-9.2a22.9 22.9 0 0 0-2.2-3.6c-1.3-1.4-1.8-2.6-3.4-2.5z" class="f"/>
|
||||
<path d="M82 136.2c-.2.2-.6.2-.7.5-.6 1-1 2 0 3 4 5 8.1 10 12.4 14.8 1 1.3 2 .6 3.3 0l-15-18.3Z" class="e"/>
|
||||
<path d="m169.4 116.6.4-11.2c4.4-1.3 8.7-2.7 13.1-3.8l-.1 10.1-13.4 5z" class="h"/>
|
||||
<path d="M179.2 112.7c1.4.8 2.3-.5 3.7-.8a13.8 13.8 0 0 0 2.8-1.1c-.9 2.8-1.8 4.5-1.5 7.8 0 .7.6 3 1.7 3.8.4.3 1.6 1.7 2.4 1.8a11 11 0 0 0 2.3.3c-.4-5.4-.7-10.7-1.3-16.1a38 38 0 0 0-1-5.2c-.3-1.6-.7-2.7-1.2-4.2-3.2 1.6-6.6 2.8-9.7 4.7a26.9 26.9 0 0 0 .2 4.7c.3 2 .2 3.5 1.6 4.3z" class="f"/>
|
||||
<path d="M191 124.6c.1 0 .5.2.7 0 1-.7 1.6-1.3 1.3-2.8-1.2-7-2.5-14-4-21-.5-1.8-1.5-1.6-2.8-2 0 0 0 .1 0 0l4.7 25.8z" class="e"/>
|
||||
<path d="M102.8 131.7c2 2.8 4 5 5.6 7.6m63-34.5-.1 11.3" class="a"/>
|
||||
<path d="M139.9 50.3a15.3 15.3 0 0 0-6.7 6.3c-1.2 2.3-1.4 4-3 8.9l-2.5 7a21.1 21.1 0 0 1-8-1.8 24.7 24.7 0 0 1-5.5-3.7 33.1 33.1 0 0 1-3.8 5.2c-.5.5-1.4.9-2 1.4a35.7 35.7 0 0 0 14 8.6c3 .9 5.5 1.6 8.2.9 8-2.1 11.3-14.8 11.7-16.4-.8-5.6-1.5-11-2.4-16.4z" class="o"/>
|
||||
<path d="M108.2 74c7-3.3 5.8-3 10.9-3.7-.2 1.4-.1 2.8-.8 4-1 1.7-2.6 3-4.5 3.7-2.2-2-3.3-2-5.6-4zM158 84.5c-1.7-4.3-.6-13.4.4-16.7 1.8-5.8 4.1-8.2 5.8-12.7.3-.9.4-1.8.3-2.7.7-4-3-6.5-5.1-6.6-5.2-.3-11.3 1.5-15.2 2.4-4 .8-4 2.6-6 4.7-2.2 2.1-1.5 5-1.5 7 0 4.5 1.6 7.3 2 11.4.6 4.7-.1 9.3-.8 14 6.7-.6 13.4.3 20.2-.8z" class="j"/>
|
||||
<path d="M145.6 47.2c-1.3 1.6-3.5 5 0 5.3 3.4.4 6.3-3.2 8.5-6q-4.2.2-8.5.7z" class="o"/>
|
||||
<path d="M164 49.8c-.5-3-5.2-4.4-6.8-1.7-2.7 4.2-8.2 12.4-3 16.3 6 4.3 10.7-8.5 9.8-14.6z" class="f"/>
|
||||
<path d="M148.9 36q-2 5.8-3.7 11.8l10.5-2 2.3-10.9-9.1 1z" class="k"/>
|
||||
<path d="M165 35.3a12.8 12.8 0 0 0-8.7-12.3 10.7 10.7 0 0 0-3.8 1 8.8 8.8 0 0 0-4 3.6 1.5 1.5 0 0 0 0 1.4c.9 3 2 5.5 3 8.3a28 28 0 0 0 4 4.5 11.8 11.8 0 0 0 2.8 1.9h.4c3.4-2.2 6.3-2.9 6.2-8.4z" class="n"/>
|
||||
<path d="M151 30a8 8 0 0 0-4.3 1c-.5.4-.4 1-.6 1.5-.5 1.3-.5 3 .7 4 .9.9 2.3 1.1 3.5.7.7-.2 1.1-1 1.5-1.6" class="n"/>
|
||||
<path d="M163 31.3c1 .9 1.6 2 2.5 3l1.6 1.7c.1.4-.7 0-1 .1l-2-.1-1.4-.2" class="a"/>
|
||||
<path d="M167 29.3c4.6 5.4 11.5 10 18.9 8.6 5-1 10-5.6 9.1-11-.4-3.6-4.4-5.7-7.7-4.4-3.2 1-6.5 3.4-10 1.9-3.8-2-5.9-6-9.7-8-6.1-4-15.5-2.7-19.4 3.7a6.4 6.4 0 0 0 1.4 6c2.9 2.7 7.3.4 13 1.6 1.8-.3 3.5-.6 4.4 1.6Zm8.2 6.7-.2-.1zm.8.4c-.5.4 0-.6 0 0zm-1.1-1.7h-.1z" class="k"/>
|
||||
<path d="M164.6 50.5c1.8 2 2.3 4.9 2.9 7.5.8 4.4 1.2 8.9 2.2 13.3.2 1.4.4 2.8 2.2 2.7 2.7.7 5.9 1.3 7.6 3.7 1.5 2.3.6 5.3 2 7.6.3 1 2.5 2.5.5 1.9-4.4-.5-8.4-2.4-12.7-2.8-6.1 0-7.8-7.5-10.1-12-2.4-6.4-4.5-14-2.1-20.8.5-2.3 2.8-4 5.1-2.5l1.3.6z" class="o"/>
|
||||
<path fill="none" stroke="#fed916" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.6" d="M148.1 31.8c2.2 1.2 3.7 3 3.2 6s-2.2 4.8-4.8 4.5c-2.6-.4-3.4-3-3.7-5.6"/>
|
||||
<path d="M138 83.4c2.4 1.3 4.5 3 7.2 3.4 4.5.7 8.7-.3 13.1-1.6m-20.4 0L128.5 97m29.9-60.2a8 8 0 0 0 5.3 2.6m23.2 32.4 1.4-14.8" class="a"/>
|
||||
<path d="M186.2 88.7c-4.8.9-10.2-1.5-12-6.2-2.9-6.7 1.9-14.6 8.5-16.6 5-2 11.6 0 14 5.2 3.3 7.5-2.8 16.2-10.5 17.6Z" class="g"/>
|
||||
<path d="M186.2 88.7c-5.2 1-11-2-12.5-7.3-2.2-7.8 4.8-16 12.7-16.3 5.5-.5 10.8 4 11.1 9.6.6 6.7-4.9 12.8-11.3 14Z" class="a"/>
|
||||
<path d="M185.6 84.4c-3.6.7-7.5-2-7.8-5.7-.2-2.7 1-5.5 3-7.2 1.5-1.2 3.6-2 5.5-2 2.1 0 4.3.8 5.5 2.6 1.9 2.4 1.6 6 0 8.4a8.5 8.5 0 0 1-4.1 3.4c-.7.1-1.4.5-2.1.5Z" class="a"/>
|
||||
<path d="M185.2 81c-2.5.4-4.7-2.3-3.7-4.7.7-2.8 4.6-4.6 6.8-2.4 2 1.5 1.5 4.5-.3 6-.8.7-1.8 1-2.8 1.1Zm10.3-19.2c-2.1.4-4.3 1-6.4 1.3m-79.6-5-1.8-14.7" class="a"/>
|
||||
<path d="m112.5 74.7-.7.3-.8.2-.7.1-.8.2-.8.1-.8-.1h-1.6l-.8-.1-.7-.3-.8-.2-.8-.1-.7-.4-.7-.4-.6-.5-.7-.4-.6-.6-.6-.5-.4-.7-.5-.6-.5-.6-.3-.8-.4-.7-.2-.8-.2-.7-.1-.8-.1-.8-.1-.8.1-.7v-.8l.2-.8.1-.8.3-.7.3-.8.3-.7.3-.7.5-.7.4-.6.5-.6.5-.7.5-.5.6-.6.6-.5.6-.5.7-.5.7-.3.7-.3.7-.4.7-.4.8-.1.7-.2.8-.3.8-.1.8-.1.8.1h.8l.8-.1.8.2.7.2.8.2.7.4.8.3.7.3.7.5.6.4.5.6.7.5.5.6.5.6.3.7.4.8.3.7.3.7.2.8.1.8.1.8v.7l.1.8-.2.8-.1.8-.2.8-.2.7-.4.7-.2.8-.4.7-.4.6-.4.7-.6.6-.4.7-.6.5-.5.6-.6.5-.6.5-.7.5-.7.3-.7.3-.7.4z" class="n"/>
|
||||
<path d="M112.5 74.7c-1.4.5-2.8.9-4.3.8-1.5.1-3-.1-4.5-.6a9.6 9.6 0 0 1-4-2.6 11 11 0 0 1-2.6-4.1c-.9-3-.7-6.1.7-8.8 1-2 2.5-3.7 4.2-5.1 1-1 2.4-1.4 3.6-2 1.7-.4 3.3-1.2 5-.9 2.3-.3 4.5.8 6.5 1.8 1.2 1 2.4 2 3.2 3.3a12 12 0 0 1 1.5 4.8 13.4 13.4 0 0 1-7.8 12.8l-1.5.6z" class="a"/>
|
||||
<path d="M111.1 70.7c-2.3.8-5.1.5-7.1-1-2.1-1.5-3.4-4.4-2.6-7a8.7 8.7 0 0 1 4.2-5.7 8.6 8.6 0 0 1 7-.8c3.1.9 5.1 4.5 4.4 7.7-.3 2.7-2.3 5-4.6 6.2l-1.3.6z" class="a"/>
|
||||
<path d="M109.8 67.5c-4 1.2-6.7-4.3-3.5-6.8 2.5-3 8.2-.9 7.1 3.2-.3 1.8-2 3-3.6 3.6zm6.5-20.9-6.3 2.5m-54.1 83.1c-5.3 0-12 8.7-14.5 7.3 2.4-5.3 9.2-7 14.5-7.3Zm72.3 10.1c-4.1 7.4-14 8.5-21.6 7.8a53 53 0 0 1 20.6-8.4l.7.1zm58.1 9.7c4.3-5.7 10.2-22 21-17.9 5 1.7 4.4 10.7-.9 10.7 4.4-9.1 16.3-14.3 25.6-9.3M73.4 60.8c3.3 6.7 6 14 11.6 19.2 6.5 1.4 13.3-1.4 19.7-2.6l5.6-1.5" class="a"/>
|
||||
<path d="M161.5 158c1.6-3.3 5.6-4.4 8.8-5.6 10-3 20.6-4.7 31-3.5 2.8 0 8.5 2.5 4.4 5.3-1.4 1.1-4 3.3 1.7-2.3a20 20 0 0 1 12.7-5c5.2 0 11 .9 15 4.6 2.2 1.6-1.3 6-2 5.5 2.8-1.9 6.5-1.8 9.7-2 3.2.2 6.8 0 9.5 1.9l.2.4-.5.6" class="o"/>
|
||||
<path d="M21.9 158.4a204 204 0 0 1 43-9.4c10.8-.8 23-2.8 33.4 1.5 2.4 4.1-7.3 4.4-10.4 5.5-2.6.5-7.7 1.6-2.5-.8a51 51 0 0 1 33.4-1.5c-1.7 3.8-8.9 3-12.8 4.4a207.6 207.6 0 0 1 77.6.6" class="o"/>
|
||||
<path d="M15 159.2c6.2-5.7 14.2-9 22.2-11.2 4.2-.9 9.4-1 12.4 2.5 1.4 3.4-3 5.2-5.4 6.3-1 .9-2.8.8-1.1-.6 3.3-4 9-5 13.9-3.7 3.4.8 1.2 5.5-1.6 5.3l-2 .8c10-4.6 22.1-4.7 32.2-.4 1.5.4 2.8 1.8.6 1.4l-71.2-.4Z" class="j"/>
|
||||
<path fill="#6fc8b7" stroke="#6fc8b7" stroke-linecap="round" stroke-linejoin="round" d="M213.7 157.6c4.8-2.7-9-5.2.4-6.8a43 43 0 0 1 8-.8c5.6 0 9.7 1.6 9.3 3-.5 1.5-4.2 2.7-8.5 3.6a10.4 10.4 0 0 1 5.5-3c3.2-.7 8.8-.8 11.5 0 1.8.7 1.7 1.5.4 2.2a27.6 27.6 0 0 1-5.6 1.7 66.4 66.4 0 0 1 20-2.2c7.7.2 15 1.4 19 3.1z"/>
|
||||
</svg>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
You are most likely accessing this website because you've had some issue with
|
||||
the traffic coming from this IP. This router is part of the <a
|
||||
href="https://www.torproject.org/">Tor Anonymity Network</a>, which is
|
||||
dedicated to <a href="https://2019.www.torproject.org/about/overview">providing
|
||||
privacy</a> to people who need it most: average computer users. This
|
||||
router IP should be generating no other traffic, unless it has been
|
||||
compromised.</p>
|
||||
|
||||
<p>
|
||||
Tor works by running user traffic through a random chain of encrypted
|
||||
servers, and then letting the traffic exit the Tor network through an
|
||||
exit node like this one. This design makes it very hard for a service to
|
||||
know which user is connecting to it, since it can only see the IP address
|
||||
of the Tor exit node:</p>
|
||||
|
||||
<p style="text-align:center;margin:40px 0">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="500" viewBox="0 0 490.28 293.73" style="width:100%;max-width:600px">
|
||||
<desc>Illustration showing how a user might connect to a service through the Tor network. The user first sends their data through three daisy-chained encrypted Tor servers that exist on three different continents. Then the last Tor server in the chain connects to the target service over the normal internet.</desc>
|
||||
<defs>
|
||||
<style>
|
||||
.t{
|
||||
fill: var(--text-color);
|
||||
stroke: var(--text-color);
|
||||
}
|
||||
</style>
|
||||
</defs>
|
||||
<path fill="#6fc8b7" d="M257.89 69.4c-6.61-6.36-10.62-7.73-18.36-8.62-7.97-1.83-20.06-7.99-24.17-.67-3.29 5.85-18.2 12.3-16.87 2.08.92-7.03 11.06-13.28 17-17.37 8.69-5.99 24.97-2.87 26.1-10.28 1.04-6.86-8.33-13.22-8.55-2.3-.38 12.84-19.62 2.24-8.73-6.2 8.92-6.9 16.05-9.02 25.61-6.15 12.37 4.83 25.58-2.05 33.73-.71 12.37-2.01 24.69-5.25 37.39-3.96 13 .43 24.08-.14 37.06.63 9.8 1.58 16.5 2.87 26.37 3.6 6.6.48 17.68-.82 24.3 1.9 8.3 4.24.44 10.94-6.89 11.8-8.79 1.05-23.59-1.19-26.6 1.86-5.8 7.41 10.75 5.68 11.27 14.54.57 9.45-5.42 9.38-8.72 16-2.7 4.2.3 13.93-1.18 18.45-1.85 5.64-19.64 4.47-14.7 14.4 4.16 8.34 1.17 19.14-10.33 12.02-5.88-3.65-9.85-22.04-15.66-21.9-11.06.27-11.37 13.18-12.7 17.52-1.3 4.27-3.79 2.33-6-.63-3.54-4.76-7.75-14.22-12.01-17.32-6.12-4.46-10.75-1.17-15.55 2.83-5.63 4.69-8.78 7.82-7.46 16.5.78 9.1-12.9 15.84-14.98 24.09-2.61 10.32-2.57 22.12-8.81 31.47-4 5.98-14.03 20.12-21.27 14.97-7.5-5.34-7.22-14.6-9.56-23.08-2.5-9.02.6-17.35-2.57-26.2-2.45-6.82-6.23-14.54-13.01-13.24-6.5.92-15.08 1.38-19.23-2.97-5.65-5.93-6-10.1-6.61-18.56 1.65-6.94 5.79-12.64 10.38-18.63 3.4-4.42 17.45-10.39 25.26-7.83 10.35 3.38 17.43 10.5 28.95 8.57 3.12-.53 9.14-4.65 7.1-6.62zm-145.6 37.27c-4.96-1.27-11.57 1.13-11.8 6.94-1.48 5.59-4.82 10.62-5.8 16.32.56 6.42 4.34 12.02 8.18 16.97 3.72 3.85 8.58 7.37 9.3 13.1 1.24 5.88 1.6 11.92 2.28 17.87.34 9.37.95 19.67 7.29 27.16 4.26 3.83 8.4-2.15 6.52-6.3-.54-4.54-.6-9.11 1.01-13.27 4.2-6.7 7.32-10.57 12.44-16.64 5.6-7.16 12.74-11.75 14-20.9.56-4.26 5.72-13.86 1.7-16.72-3.14-2.3-15.83-4-18.86-6.49-2.36-1.71-3.86-9.2-9.86-12.07-4.91-3.1-10.28-6.73-16.4-5.97zm11.16-49.42c6.13-2.93 10.58-4.77 14.61-10.25 3.5-4.28 2.46-12.62-2.59-15.45-7.27-3.22-13.08 5.78-18.81 8.71-5.96 4.2-12.07-5.48-6.44-10.6 5.53-4.13.38-9.2-5.66-8.48-6.12.8-12.48-1.45-18.6-1.73-5.3-.7-10.13-1-15.45-1.37-5.37-.05-16.51-2.23-25.13.87-5.42 1.79-12.5 5.3-16.73 9.06-4.85 4.2.2 7.56 5.54 7.45 5.3-.22 16.8-5.36 20.16.98 3.68 8.13-5.82 18.29-5.2 26.69.1 6.2 3.37 11 4.74 16.98 1.62 5.94 6.17 10.45 10 15.14 4.7 5.06 13.06 6.3 19.53 8.23 7.46.14 3.34-9.23 3.01-14.11 1.77-7.15 8.49-7.82 12.68-13.5 7.14-7.72 16.41-13.4 24.34-18.62zM190.88 3.1c-4.69 0-13.33.04-18.17-.34-7.65.12-13.1-.62-19.48-1.09-3.67.39-9.09 3.34-5.28 7.04 3.8.94 7.32 4.92 7.1 9.31 1.32 4.68 1.2 11.96 6.53 13.88 4.76-.2 7.12-7.6 11.93-8.25 6.85-2.05 12.5-4.58 17.87-9.09 2.48-2.76 7.94-6.38 5.26-10.33-1.55-1.31-2.18-.64-5.76-1.13zm178.81 157.37c-2.66 10.08-5.88 24.97 9.4 15.43 7.97-5.72 12.58-2.02 17.47 1.15.5.43 2.65 9.2 7.19 8.53 5.43-2.1 11.55-5.1 14.96-11.2 2.6-4.62 3.6-12.39 2.76-13.22-3.18-3.43-6.24-11.03-7.7-15.1-.76-2.14-2.24-2.6-2.74-.4-2.82 12.85-6.04 1.22-10.12-.05-8.2-1.67-29.62 7.17-31.22 14.86z"/>
|
||||
<g fill="none">
|
||||
<path stroke="#cf63a6" stroke-linecap="round" stroke-width="2.76" d="M135.2 140.58c61.4-3.82 115.95-118.83 151.45-103.33"/>
|
||||
<path stroke="#cf63a6" stroke-linecap="round" stroke-width="2.76" d="M74.43 46.66c38.15 8.21 64.05 42.26 60.78 93.92M286.65 37.25c-9.6 39.44-3.57 57.12-35.64 91.98"/>
|
||||
<path stroke="#e4c101" stroke-dasharray="9.06,2.265" stroke-width="2.27" d="M397.92 162.52c-31.38 1.26-90.89-53.54-148.3-36.17"/>
|
||||
<path stroke="#cf63a6" stroke-linecap="round" stroke-width="2.77" d="M17.6 245.88c14.35 0 14.4.05 28-.03"/>
|
||||
<path stroke="#e3bf01" stroke-dasharray="9.06,2.265" stroke-width="2.27" d="M46.26 274.14c-17.52-.12-16.68.08-30.34.07"/>
|
||||
</g>
|
||||
<g transform="translate(120.8 -35.81)">
|
||||
<circle cx="509.78" cy="68.74" r="18.12" fill="#240a3b" transform="translate(-93.3 38.03) scale(.50637)"/>
|
||||
<circle cx="440.95" cy="251.87" r="18.12" fill="#240a3b" transform="translate(-93.3 38.03) scale(.50637)"/>
|
||||
<circle cx="212.62" cy="272.19" r="18.12" fill="#240a3b" transform="translate(-93.3 38.03) scale(.50637)"/>
|
||||
<circle cx="92.12" cy="87.56" r="18.12" fill="#240a3b" transform="translate(-93.3 38.03) scale(.50637)"/>
|
||||
<circle cx="730.88" cy="315.83" r="18.12" fill="#67727b" transform="translate(-93.3 38.03) scale(.50637)"/>
|
||||
<circle cx="-102.85" cy="282.18" r="9.18" fill="#240a3b"/>
|
||||
<circle cx="-102.85" cy="309.94" r="9.18" fill="#67727b"/>
|
||||
</g>
|
||||
<g class="t">
|
||||
<text xml:space="preserve" x="-24.76" y="10.37" stroke-width=".26" font-size="16.93" font-weight="700" style="line-height:1.25" transform="translate(27.79 2.5)" word-spacing="0"><tspan x="-24.76" y="10.37">The user</tspan></text>
|
||||
<text xml:space="preserve" x="150.63" y="196.62" stroke-width=".26" font-size="16.93" font-weight="700" style="line-height:1.25" transform="translate(27.79 2.5)" word-spacing="0"><tspan x="150.63" y="196.62">This server</tspan></text>
|
||||
<text xml:space="preserve" x="346.39" y="202.63" stroke-width=".26" font-size="16.93" font-weight="700" style="line-height:1.25" transform="translate(27.79 2.5)" word-spacing="0"><tspan x="346.39" y="202.63">Your service</tspan></text>
|
||||
<text xml:space="preserve" x="34.52" y="249.07" stroke-width=".26" font-size="16.93" font-weight="700" style="line-height:1.25" transform="translate(27.79 2.5)" word-spacing="0"><tspan x="34.52" y="249.07">Tor encrypted link</tspan></text>
|
||||
<text xml:space="preserve" x="34.13" y="276.05" stroke-width=".26" font-size="16.93" font-weight="700" style="line-height:1.25" transform="translate(27.79 2.5)" word-spacing="0"><tspan x="34.13" y="276.05">Unencrypted link</tspan></text>
|
||||
<path fill="none" stroke-linecap="round" stroke-width="1.67" d="M222.6 184.1c-2.6-15.27 8.95-23.6 18.43-38.86m186.75 45.61c-.68-10.17-9.4-17.68-18.08-23.49"/>
|
||||
<path fill="none" stroke-linecap="round" stroke-width="1.67" d="M240.99 153.41c.35-3.41 1.19-6.17.04-8.17m-7.15 5.48c1.83-2.8 4.58-4.45 7.15-5.48"/>
|
||||
<path fill="none" stroke-linecap="round" stroke-width="1.67" d="M412.43 173.21c-2.2-3.15-2.54-3.85-2.73-5.85m0 0c2.46-.65 3.85.01 6.67 1.24M61.62 40.8C48.89 36.98 36.45 27.54 36.9 18.96M61.62 40.8c.05-2.58-3.58-4.8-5.25-5.26m-2.65 6.04c1.8.54 6.8 1.31 7.9-.78"/>
|
||||
<path fill="none" stroke-linecap="round" stroke-linejoin="round" stroke-width="2.44" d="M1.22 229.4h247.74v63.1H1.22z"/>
|
||||
</g>
|
||||
</svg>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<a href="https://2019.www.torproject.org/about/overview">Read more about how Tor works.</a></p>
|
||||
|
||||
<p>
|
||||
Tor sees use by <a href="https://2019.www.torproject.org/about/torusers">many
|
||||
important segments of the population</a>, including whistleblowers,
|
||||
journalists, Chinese dissidents skirting the Great Firewall and oppressive
|
||||
censorship, abuse victims, stalker targets, the US military, and law
|
||||
enforcement, just to name a few. While Tor is not designed for malicious
|
||||
computer users, it is true that they can use the network for malicious ends.
|
||||
In reality, however, the actual amount of <a
|
||||
href="https://support.torproject.org/abuse/">abuse</a> is quite low. This
|
||||
is largely because criminals and hackers have significantly better access to
|
||||
privacy and anonymity than do the regular users whom they prey upon. Criminals
|
||||
can and do <a
|
||||
href="https://web.archive.org/web/20200131013910/http://voices.washingtonpost.com/securityfix/2008/08/web_fraud_20_tools.html">build,
|
||||
sell, and trade</a> far larger and <a
|
||||
href="https://web.archive.org/web/20200131013908/http://voices.washingtonpost.com/securityfix/2008/08/web_fraud_20_distributing_your.html">more
|
||||
powerful networks</a> than Tor on a daily basis. Thus, in the mind of this
|
||||
operator, the social need for easily accessible, censorship-resistant, private,
|
||||
anonymous communication trumps the risk of unskilled bad actors, who are
|
||||
almost always more easily uncovered by traditional police work than by
|
||||
extensive monitoring and surveillance anyway.</p>
|
||||
|
||||
<p>
|
||||
In terms of applicable law, the best way to understand Tor is to consider it a
|
||||
network of routers operating as common carriers, much like the Internet
|
||||
backbone. However, unlike the Internet backbone routers, Tor routers
|
||||
explicitly do not contain identifiable routing information about the source of
|
||||
a packet, and no single Tor node can determine both the origin and destination
|
||||
of a given transmission.</p>
|
||||
|
||||
<p>
|
||||
As such, there is little the operator of this router can do to help you track
|
||||
the connection further. This router maintains no logs of any of the Tor
|
||||
traffic, so there is little that can be done to trace either legitimate or
|
||||
illegitimate traffic (or to filter one from the other). Attempts to
|
||||
seize this router will accomplish nothing.</p>
|
||||
|
||||
<!-- FIXME: US-Only section. Remove if you are a non-US operator -->
|
||||
|
||||
<p>
|
||||
Furthermore, this machine also serves as a carrier of email, which means that
|
||||
its contents are further protected under the ECPA. <a
|
||||
href="https://www.law.cornell.edu/uscode/text/18/2707">18
|
||||
USC 2707</a> explicitly allows for civil remedies ($1000/account
|
||||
<i>plus</i> legal fees)
|
||||
in the event of a seizure executed without good faith or probable cause (it
|
||||
should be clear at this point that traffic with an originating IP address of
|
||||
FIXME_DNS_NAME should not constitute probable cause to seize the
|
||||
machine). Similar considerations exist for First Amendment content on this
|
||||
machine.</p>
|
||||
|
||||
<!-- FIXME: May or may not be US-only. Some non-US tor nodes have in
|
||||
fact reported DMCA harassment... -->
|
||||
|
||||
<p>
|
||||
If you are a representative of a company who feels that this router is being
|
||||
used to violate the DMCA, please be aware that this machine does not host or
|
||||
contain any illegal content. Also be aware that network infrastructure
|
||||
maintainers are not liable for the type of content that passes over their
|
||||
equipment, in accordance with <a
|
||||
href="https://www.law.cornell.edu/uscode/text/17/512">DMCA
|
||||
"safe harbor" provisions</a>. In other words, you will have just as much luck
|
||||
sending a takedown notice to the Internet backbone providers. Please consult
|
||||
<a href="https://community.torproject.org/relay/community-resources/eff-tor-legal-faq/tor-dmca-response/">EFF's prepared
|
||||
response</a> for more information on this matter.</p>
|
||||
|
||||
<p>For more information, please consult the following documentation:</p>
|
||||
|
||||
<div class="links">
|
||||
<a href="https://2019.www.torproject.org/about/overview">Tor Overview</a>
|
||||
<a href="https://support.torproject.org/abuse/">Tor Abuse FAQ</a>
|
||||
<a href="https://community.torproject.org/relay/community-resources/eff-tor-legal-faq/">Tor Legal FAQ</a>
|
||||
</div>
|
||||
|
||||
<p>
|
||||
That being said, if you still have a complaint about the router, you may
|
||||
email the <a href="mailto:FIXME_YOUR_EMAIL_ADDRESS">maintainer</a>. If
|
||||
complaints are related to a particular service that is being abused, I will
|
||||
consider removing that service from my exit policy, which would prevent my
|
||||
router from allowing that traffic to exit through it. I can only do this on an
|
||||
IP+destination port basis, however. Common P2P ports are
|
||||
already blocked.</p>
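
<!-- Illustrative example (not part of the original notice): in the relay's
     torrc, refusing exits to a single reported service might look like

       ExitPolicy reject 203.0.113.7:80

     where the address and port are placeholders for the abused service. -->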
|
||||
|
||||
<p>
|
||||
You also have the option of blocking this IP address and others on
|
||||
the Tor network if you so desire. The Tor project provides a <a
|
||||
href="https://check.torproject.org/torbulkexitlist">web service</a>
|
||||
to fetch a list of all IP addresses of Tor exit nodes that allow exiting to a
|
||||
specified IP:port combination, and an official <a
|
||||
href="https://dist.torproject.org/tordnsel/">DNSRBL</a> is also available to
|
||||
determine if a given IP address is actually a Tor exit server. Please
|
||||
be considerate
|
||||
when using these options. It would be unfortunate to deny all Tor users access
|
||||
to your site indefinitely simply because of a few bad apples.</p>
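
<!-- Illustrative example (not part of the original notice): the exit-list
     service linked above can also be queried from a shell; the address below
     is a placeholder.

       curl -s https://check.torproject.org/torbulkexitlist | grep -Fx 203.0.113.7

     grep exits with status 0 if the address currently appears in the
     published exit list. -->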
|
||||
|
||||
</main>
|
||||
</body>
|
||||
</html>
|
@ -1,14 +0,0 @@
@LOCALSTATEDIR@/log/tor/*log {
	daily
	rotate 5
	compress
	delaycompress
	missingok
	notifempty
	# you may need to change the username/groupname below
	create 0640 _tor _tor
	sharedscripts
	postrotate
		/etc/init.d/tor reload > /dev/null
	endscript
}
@ -1,83 +0,0 @@
|
||||
#!/usr/bin/perl -w
|
||||
|
||||
require 5.005;
|
||||
use strict;
|
||||
use IO::Socket;
|
||||
use Getopt::Std;
|
||||
|
||||
# Checks routers for open socks-ports and socks5
|
||||
# Successful connects go to STDOUT, failed ones to STDERR.
|
||||
# We only do one check per loop in -d mode, so it takes some time.
|
||||
|
||||
# Contributed by Peter Kornherr <peter at wuschelpuschel dot org>, and
|
||||
# cleaned up by Peter Palfrader <peter at palfrader dot org>.
|
||||
|
||||
our($opt_i,$opt_p,$opt_d,$opt_h,$opt_l);
|
||||
getopts('i:p:dhl:');
|
||||
|
||||
if ($opt_h || !($opt_d||$opt_i||$opt_l)) {
|
||||
print "Usage: $0 -d < file_with_routers_in_it\n";
|
||||
print "or: $0 -i IP -p Port\n";
|
||||
print "or: $0 -l IP:Port\n";
|
||||
exit;
|
||||
}
|
||||
|
||||
if ($opt_d) {
|
||||
open (IN,"<-") or die $!;
|
||||
while (<IN>) {
|
||||
next unless /^router /;
|
||||
(my $routername,my $checkip,my $checkport) = (split(" "))[1,2,4];
|
||||
&do_check($checkip,$checkport,$routername);
|
||||
}
|
||||
} elsif ($opt_i && $opt_p) {
|
||||
&do_check($opt_i,$opt_p);
|
||||
} elsif ($opt_l) {
|
||||
&do_check(split(":",$opt_l));
|
||||
}
|
||||
|
||||
sub do_check {
|
||||
(my $checkip, my $checkport,my $routername) = @_;
|
||||
# as socksports may not be published (therefore "0") here,
|
||||
# let's try 9050, the default port:
|
||||
if ($checkport == 0) { $checkport = 9050; }
|
||||
# print "Checking $checkip:$checkport\n";
|
||||
my $s5socket = IO::Socket::INET->new(PeerAddr => $checkip,
|
||||
PeerPort => $checkport, Proto => "tcp", Type => SOCK_STREAM,
|
||||
Timeout => "20");
|
||||
if ($s5socket) {
|
||||
my @got;
|
||||
print $s5socket pack("CCC",'5','1','0');
|
||||
eval {
|
||||
local $SIG{ALRM} = sub { die "alarm\n" };
|
||||
alarm 10;
|
||||
read ($s5socket,$got[0],1);
|
||||
read ($s5socket,$got[1],1);
|
||||
alarm 0;
|
||||
};
|
||||
if ($@) {
|
||||
return; # die unless $@ eq "alarm\n";
|
||||
}
|
||||
if ($got[0] eq pack('C','5')) {
|
||||
if(defined($routername)) {
|
||||
print "Found SOCKS5 at $routername ($checkip:$checkport)\n";
|
||||
} else {
|
||||
print "Found SOCKS5 at $checkip:$checkport\n";
|
||||
}
|
||||
} else {
|
||||
if(defined($routername)) {
|
||||
print "$routername ($checkip:$checkport) answers - " .
|
||||
"but not SOCKS5.\n";
|
||||
} else {
|
||||
print "$checkip:$checkport answers - but not SOCKS5.\n";
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if(defined($routername)) {
|
||||
print STDERR "Can't connect to $routername " .
|
||||
"($checkip:$checkport) ($!)\n";
|
||||
} else {
|
||||
print STDERR "Can't connect to $checkip:$checkport ($!)\n";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,323 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# Copyright 2005-2006 Nick Mathewson
|
||||
# See the LICENSE file in the Tor distribution for licensing information.
|
||||
|
||||
# Requires Python 2.2 or later.
|
||||
|
||||
"""
|
||||
exitlist -- Given a Tor directory on stdin, lists the Tor servers
|
||||
that accept connections to given addresses.
|
||||
|
||||
example usage:
|
||||
|
||||
cat ~/.tor/cached-descriptors* | python exitlist 18.244.0.188:80
|
||||
|
||||
You should look at the "FetchUselessDescriptors" and "FetchDirInfoEarly"
|
||||
config options in the man page.
|
||||
|
||||
Note that this script won't give you a perfect list of IP addresses
|
||||
that might connect to you using Tor.
|
||||
False negatives:
|
||||
- Some Tor servers might exit from other addresses than the one they
|
||||
publish in their descriptor.
|
||||
False positives:
|
||||
- This script just looks at the descriptor lists, so it counts relays
|
||||
that were running a day in the past and aren't running now (or are
|
||||
now running at a different address).
|
||||
|
||||
See https://check.torproject.org/ for an alternative (more accurate!)
|
||||
approach.
|
||||
|
||||
"""
|
||||
|
||||
#
|
||||
# Change this to True if you want more verbose output. By default, we
|
||||
# only print the IPs of the servers that accept any of the listed
|
||||
# addresses, one per line.
|
||||
#
|
||||
VERBOSE = False
|
||||
|
||||
#
|
||||
# Change this to True if you want to reverse the output, and list the
|
||||
# servers that accept *none* of the listed addresses.
|
||||
#
|
||||
INVERSE = False
|
||||
|
||||
#
|
||||
# Change this list to contain all of the target services you are interested
|
||||
# in. It must contain one entry per line, each consisting of an IPv4 address,
|
||||
# a colon, and a port number. This default is only used if we don't learn
|
||||
# about any addresses from the command-line.
|
||||
#
|
||||
ADDRESSES_OF_INTEREST = """
|
||||
1.2.3.4:80
|
||||
"""
|
||||
|
||||
|
||||
#
|
||||
# YOU DO NOT NEED TO EDIT AFTER THIS POINT.
|
||||
#
|
||||
|
||||
import sys
|
||||
import re
|
||||
import getopt
|
||||
import socket
|
||||
import struct
|
||||
import time
|
||||
|
||||
assert sys.version_info >= (2,2)
|
||||
|
||||
|
||||
def maskIP(ip,mask):
|
||||
return "".join([chr(ord(a) & ord(b)) for a,b in zip(ip,mask)])
|
||||
|
||||
def maskFromLong(lng):
|
||||
return struct.pack("!L", lng)
|
||||
|
||||
def maskByBits(n):
|
||||
return maskFromLong(0xffffffffl ^ ((1L<<(32-n))-1))
|
||||
|
||||
class Pattern:
|
||||
"""
|
||||
>>> import socket
|
||||
>>> ip1 = socket.inet_aton("192.169.64.11")
|
||||
>>> ip2 = socket.inet_aton("192.168.64.11")
|
||||
>>> ip3 = socket.inet_aton("18.244.0.188")
|
||||
|
||||
>>> print Pattern.parse("18.244.0.188")
|
||||
18.244.0.188/255.255.255.255:1-65535
|
||||
>>> print Pattern.parse("18.244.0.188/16:*")
|
||||
18.244.0.0/255.255.0.0:1-65535
|
||||
>>> print Pattern.parse("18.244.0.188/2.2.2.2:80")
|
||||
2.0.0.0/2.2.2.2:80-80
|
||||
>>> print Pattern.parse("192.168.0.1/255.255.00.0:22-25")
|
||||
192.168.0.0/255.255.0.0:22-25
|
||||
>>> p1 = Pattern.parse("192.168.0.1/255.255.00.0:22-25")
|
||||
>>> import socket
|
||||
>>> p1.appliesTo(ip1, 22)
|
||||
False
|
||||
>>> p1.appliesTo(ip2, 22)
|
||||
True
|
||||
>>> p1.appliesTo(ip2, 25)
|
||||
True
|
||||
>>> p1.appliesTo(ip2, 26)
|
||||
False
|
||||
"""
|
||||
def __init__(self, ip, mask, portMin, portMax):
|
||||
self.ip = maskIP(ip,mask)
|
||||
self.mask = mask
|
||||
self.portMin = portMin
|
||||
self.portMax = portMax
|
||||
|
||||
def __str__(self):
|
||||
return "%s/%s:%s-%s"%(socket.inet_ntoa(self.ip),
|
||||
socket.inet_ntoa(self.mask),
|
||||
self.portMin,
|
||||
self.portMax)
|
||||
|
||||
def parse(s):
|
||||
if ":" in s:
|
||||
addrspec, portspec = s.split(":",1)
|
||||
else:
|
||||
addrspec, portspec = s, "*"
|
||||
|
||||
if addrspec == '*':
|
||||
ip,mask = "\x00\x00\x00\x00","\x00\x00\x00\x00"
|
||||
elif '/' not in addrspec:
|
||||
ip = socket.inet_aton(addrspec)
|
||||
mask = "\xff\xff\xff\xff"
|
||||
else:
|
||||
ip,mask = addrspec.split("/",1)
|
||||
ip = socket.inet_aton(ip)
|
||||
if "." in mask:
|
||||
mask = socket.inet_aton(mask)
|
||||
else:
|
||||
mask = maskByBits(int(mask))
|
||||
|
||||
if portspec == '*':
|
||||
portMin = 1
|
||||
portMax = 65535
|
||||
elif '-' not in portspec:
|
||||
portMin = portMax = int(portspec)
|
||||
else:
|
||||
portMin, portMax = map(int,portspec.split("-",1))
|
||||
|
||||
return Pattern(ip,mask,portMin,portMax)
|
||||
|
||||
parse = staticmethod(parse)
|
||||
|
||||
def appliesTo(self, ip, port):
|
||||
return ((maskIP(ip,self.mask) == self.ip) and
|
||||
(self.portMin <= port <= self.portMax))
|
||||
|
||||
class Policy:
|
||||
"""
|
||||
>>> import socket
|
||||
>>> ip1 = socket.inet_aton("192.169.64.11")
|
||||
>>> ip2 = socket.inet_aton("192.168.64.11")
|
||||
>>> ip3 = socket.inet_aton("18.244.0.188")
|
||||
|
||||
>>> pol = Policy.parseLines(["reject *:80","accept 18.244.0.188:*"])
|
||||
>>> print str(pol).strip()
|
||||
reject 0.0.0.0/0.0.0.0:80-80
|
||||
accept 18.244.0.188/255.255.255.255:1-65535
|
||||
>>> pol.accepts(ip1,80)
|
||||
False
|
||||
>>> pol.accepts(ip3,80)
|
||||
False
|
||||
>>> pol.accepts(ip3,81)
|
||||
True
|
||||
"""
|
||||
|
||||
def __init__(self, lst):
|
||||
self.lst = lst
|
||||
|
||||
def parseLines(lines):
|
||||
r = []
|
||||
for item in lines:
|
||||
a,p=item.split(" ",1)
|
||||
if a == 'accept':
|
||||
a = True
|
||||
elif a == 'reject':
|
||||
a = False
|
||||
else:
|
||||
raise ValueError("Unrecognized action %r",a)
|
||||
p = Pattern.parse(p)
|
||||
r.append((p,a))
|
||||
return Policy(r)
|
||||
|
||||
parseLines = staticmethod(parseLines)
|
||||
|
||||
def __str__(self):
|
||||
r = []
|
||||
for pat, accept in self.lst:
|
||||
rule = accept and "accept" or "reject"
|
||||
r.append("%s %s\n"%(rule,pat))
|
||||
return "".join(r)
|
||||
|
||||
def accepts(self, ip, port):
|
||||
for pattern,accept in self.lst:
|
||||
if pattern.appliesTo(ip,port):
|
||||
return accept
|
||||
return True
|
||||
|
||||
class Server:
|
||||
def __init__(self, name, ip, policy, published, fingerprint):
|
||||
self.name = name
|
||||
self.ip = ip
|
||||
self.policy = policy
|
||||
self.published = published
|
||||
self.fingerprint = fingerprint
|
||||
|
||||
def uniq_sort(lst):
|
||||
d = {}
|
||||
for item in lst: d[item] = 1
|
||||
lst = d.keys()
|
||||
lst.sort()
|
||||
return lst
|
||||
|
||||
def run():
|
||||
global VERBOSE
|
||||
global INVERSE
|
||||
global ADDRESSES_OF_INTEREST
|
||||
|
||||
if len(sys.argv) > 1:
|
||||
try:
|
||||
opts, pargs = getopt.getopt(sys.argv[1:], "vx")
|
||||
except getopt.GetoptError, e:
|
||||
print """
|
||||
usage: cat ~/.tor/cached-routers* | %s [-v] [-x] [host:port [host:port [...]]]
|
||||
-v verbose output
|
||||
-x invert results
|
||||
""" % sys.argv[0]
|
||||
sys.exit(0)
|
||||
|
||||
for o, a in opts:
|
||||
if o == "-v":
|
||||
VERBOSE = True
|
||||
if o == "-x":
|
||||
INVERSE = True
|
||||
if len(pargs):
|
||||
ADDRESSES_OF_INTEREST = "\n".join(pargs)
|
||||
|
||||
servers = []
|
||||
policy = []
|
||||
name = ip = None
|
||||
published = 0
|
||||
fp = ""
|
||||
for line in sys.stdin.xreadlines():
|
||||
if line.startswith('router '):
|
||||
if name:
|
||||
servers.append(Server(name, ip, Policy.parseLines(policy),
|
||||
published, fp))
|
||||
_, name, ip, rest = line.split(" ", 3)
|
||||
policy = []
|
||||
published = 0
|
||||
fp = ""
|
||||
elif line.startswith('fingerprint') or \
|
||||
line.startswith('opt fingerprint'):
|
||||
elts = line.strip().split()
|
||||
if elts[0] == 'opt': del elts[0]
|
||||
assert elts[0] == 'fingerprint'
|
||||
del elts[0]
|
||||
fp = "".join(elts)
|
||||
elif line.startswith('accept ') or line.startswith('reject '):
|
||||
policy.append(line.strip())
|
||||
elif line.startswith('published '):
|
||||
date = time.strptime(line[len('published '):].strip(),
|
||||
"%Y-%m-%d %H:%M:%S")
|
||||
published = time.mktime(date)
|
||||
|
||||
if name:
|
||||
servers.append(Server(name, ip, Policy.parseLines(policy), published,
|
||||
fp))
|
||||
|
||||
targets = []
|
||||
for line in ADDRESSES_OF_INTEREST.split("\n"):
|
||||
line = line.strip()
|
||||
if not line: continue
|
||||
p = Pattern.parse(line)
|
||||
targets.append((p.ip, p.portMin))
|
||||
|
||||
# remove all but the latest server of each IP/Nickname pair.
|
||||
latest = {}
|
||||
for s in servers:
|
||||
if (not latest.has_key((s.fingerprint))
|
||||
or s.published > latest[s.fingerprint].published):
|
||||
latest[s.fingerprint] = s
|
||||
servers = latest.values()
|
||||
|
||||
accepters, rejecters = {}, {}
|
||||
for s in servers:
|
||||
for ip,port in targets:
|
||||
if s.policy.accepts(ip,port):
|
||||
accepters[s.ip] = s
|
||||
break
|
||||
else:
|
||||
rejecters[s.ip] = s
|
||||
|
||||
# If any server at IP foo accepts, the IP does not reject.
|
||||
for k in accepters.keys():
|
||||
if rejecters.has_key(k):
|
||||
del rejecters[k]
|
||||
|
||||
if INVERSE:
|
||||
printlist = rejecters.values()
|
||||
else:
|
||||
printlist = accepters.values()
|
||||
|
||||
ents = []
|
||||
if VERBOSE:
|
||||
ents = uniq_sort([ "%s\t%s"%(s.ip,s.name) for s in printlist ])
|
||||
else:
|
||||
ents = uniq_sort([ s.ip for s in printlist ])
|
||||
for e in ents:
|
||||
print e
|
||||
|
||||
def _test():
|
||||
import doctest, exitparse
|
||||
return doctest.testmod(exitparse)
|
||||
#_test()
|
||||
|
||||
run()
|
||||
|
@ -1,274 +0,0 @@
|
||||
;tor.nsi - A basic win32 installer for Tor
|
||||
; Originally written by J Doe.
|
||||
; Modified by Steve Topletz, Andrew Lewman
|
||||
; See the Tor LICENSE for licensing information
|
||||
;-----------------------------------------
|
||||
;
|
||||
!include "MUI.nsh"
|
||||
!include "LogicLib.nsh"
|
||||
!include "FileFunc.nsh"
|
||||
!insertmacro GetParameters
|
||||
!define VERSION "0.4.8.1-alpha-dev"
|
||||
!define INSTALLER "tor-${VERSION}-win32.exe"
|
||||
!define WEBSITE "https://www.torproject.org/"
|
||||
!define LICENSE "LICENSE"
|
||||
!define BIN "..\bin" ;BIN is where it expects to find tor.exe, tor-resolve.exe
|
||||
|
||||
|
||||
SetCompressor /SOLID LZMA ;Tighter compression
|
||||
RequestExecutionLevel user ;Updated for Vista compatibility
|
||||
OutFile ${INSTALLER}
|
||||
InstallDir $PROGRAMFILES\Tor
|
||||
SetOverWrite ifnewer
|
||||
Name "Tor"
|
||||
Caption "Tor ${VERSION} Setup"
|
||||
BrandingText "The Onion Router"
|
||||
CRCCheck on
|
||||
XPStyle on
|
||||
VIProductVersion "${VERSION}"
|
||||
VIAddVersionKey "ProductName" "The Onion Router: Tor"
|
||||
VIAddVersionKey "Comments" "${WEBSITE}"
|
||||
VIAddVersionKey "LegalTrademarks" "Three line BSD"
|
||||
VIAddVersionKey "LegalCopyright" "©2004-2008, Roger Dingledine, Nick Mathewson. ©2009 The Tor Project, Inc. "
|
||||
VIAddVersionKey "FileDescription" "Tor is an implementation of Onion Routing. You can read more at ${WEBSITE}"
|
||||
VIAddVersionKey "FileVersion" "${VERSION}"
|
||||
|
||||
!define MUI_WELCOMEPAGE_TITLE "Welcome to the Tor Setup Wizard"
|
||||
!define MUI_WELCOMEPAGE_TEXT "This wizard will guide you through the installation of Tor ${VERSION}.\r\n\r\nIf you have previously installed Tor and it is currently running, please exit Tor first before continuing this installation.\r\n\r\n$_CLICK"
|
||||
!define MUI_ABORTWARNING
|
||||
!define MUI_ICON "${NSISDIR}\Contrib\Graphics\Icons\win-install.ico"
|
||||
!define MUI_UNICON "${NSISDIR}\Contrib\Graphics\Icons\win-uninstall.ico"
|
||||
!define MUI_HEADERIMAGE_BITMAP "${NSISDIR}\Contrib\Graphics\Header\win.bmp"
|
||||
!define MUI_FINISHPAGE_RUN "$INSTDIR\tor.exe"
|
||||
!define MUI_FINISHPAGE_LINK "Visit the Tor website for the latest updates."
|
||||
!define MUI_FINISHPAGE_LINK_LOCATION ${WEBSITE}
|
||||
|
||||
!insertmacro MUI_PAGE_WELCOME
|
||||
; There's no point in having a clickthrough license: Our license adds
|
||||
; certain rights, but doesn't remove them.
|
||||
; !insertmacro MUI_PAGE_LICENSE "${LICENSE}"
|
||||
!insertmacro MUI_PAGE_COMPONENTS
|
||||
!insertmacro MUI_PAGE_DIRECTORY
|
||||
!insertmacro MUI_PAGE_INSTFILES
|
||||
!insertmacro MUI_PAGE_FINISH
|
||||
!insertmacro MUI_UNPAGE_WELCOME
|
||||
!insertmacro MUI_UNPAGE_CONFIRM
|
||||
!insertmacro MUI_UNPAGE_INSTFILES
|
||||
!insertmacro MUI_UNPAGE_FINISH
|
||||
!insertmacro MUI_LANGUAGE "English"
|
||||
|
||||
Var CONFIGDIR
|
||||
Var CONFIGFILE
|
||||
|
||||
Function .onInit
|
||||
Call ParseCmdLine
|
||||
FunctionEnd
|
||||
|
||||
;Sections
|
||||
;--------
|
||||
|
||||
Section "Tor" Tor
|
||||
;Files that have to be installed for tor to run and that the user
|
||||
;cannot choose not to install
|
||||
SectionIn RO
|
||||
SetOutPath $INSTDIR
|
||||
Call ExtractBinaries
|
||||
Call ExtractIcon
|
||||
WriteINIStr "$INSTDIR\Tor Website.url" "InternetShortcut" "URL" ${WEBSITE}
|
||||
|
||||
StrCpy $CONFIGFILE "torrc"
|
||||
StrCpy $CONFIGDIR $APPDATA\Tor
|
||||
; ;If $APPDATA isn't valid here (Early win95 releases with no updated
|
||||
; ; shfolder.dll) then we put it in the program directory instead.
|
||||
; StrCmp $APPDATA "" "" +2
|
||||
; StrCpy $CONFIGDIR $INSTDIR
|
||||
SetOutPath $CONFIGDIR
|
||||
;If there's already a torrc config file, ask if they want to
|
||||
;overwrite it with the new one.
|
||||
${If} ${FileExists} "$CONFIGDIR\torrc"
|
||||
MessageBox MB_ICONQUESTION|MB_YESNO "You already have a Tor config file.$\r$\nDo you want to overwrite it with the default sample config file?" IDYES Yes IDNO No
|
||||
Yes:
|
||||
Delete $CONFIGDIR\torrc
|
||||
Goto Next
|
||||
No:
|
||||
StrCpy $CONFIGFILE "torrc.sample"
|
||||
Next:
|
||||
${EndIf}
|
||||
File /oname=$CONFIGFILE "..\src\config\torrc.sample"
|
||||
|
||||
; the geoip file needs to be included and stuffed into the right directory
|
||||
; otherwise tor is unhappy
|
||||
SetOutPath $APPDATA\Tor
|
||||
Call ExtractGEOIP
|
||||
SectionEnd
|
||||
|
||||
Section "Documents" Docs
|
||||
Call ExtractDocuments
|
||||
SectionEnd
|
||||
|
||||
SubSection /e "Shortcuts" Shortcuts
|
||||
|
||||
Section "Start Menu" StartMenu
|
||||
SetOutPath $INSTDIR
|
||||
${If} ${FileExists} "$SMPROGRAMS\Tor\*.*"
|
||||
RMDir /r "$SMPROGRAMS\Tor"
|
||||
${EndIf}
|
||||
Call CreateTorLinks
|
||||
${If} ${FileExists} "$INSTDIR\Documents\*.*"
|
||||
Call CreateDocLinks
|
||||
${EndIf}
|
||||
SectionEnd
|
||||
|
||||
Section "Desktop" Desktop
|
||||
SetOutPath $INSTDIR
|
||||
CreateShortCut "$DESKTOP\Tor.lnk" "$INSTDIR\tor.exe" "" "$INSTDIR\tor.ico"
|
||||
SectionEnd
|
||||
|
||||
Section /o "Run at startup" Startup
|
||||
SetOutPath $INSTDIR
|
||||
CreateShortCut "$SMSTARTUP\Tor.lnk" "$INSTDIR\tor.exe" "" "$INSTDIR\tor.ico" "" SW_SHOWMINIMIZED
|
||||
SectionEnd
|
||||
|
||||
SubSectionEnd
|
||||
|
||||
Section "Uninstall"
|
||||
Call un.InstallPackage
|
||||
SectionEnd
|
||||
|
||||
Section -End
|
||||
WriteUninstaller "$INSTDIR\Uninstall.exe"
|
||||
;The registry entries simply add the Tor uninstaller to the Windows
|
||||
;uninstall list.
|
||||
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Tor" "DisplayName" "Tor (remove only)"
|
||||
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Tor" "UninstallString" '"$INSTDIR\Uninstall.exe"'
|
||||
SectionEnd
|
||||
|
||||
!insertmacro MUI_FUNCTION_DESCRIPTION_BEGIN
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${Tor} "The core executable and config files needed for Tor to run."
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${Docs} "Documentation about Tor."
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${ShortCuts} "Shortcuts to easily start Tor"
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${StartMenu} "Shortcuts to access Tor and its documentation from the Start Menu"
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${Desktop} "A shortcut to start Tor from the desktop"
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${Startup} "Launches Tor automatically at startup in a minimized window"
|
||||
!insertmacro MUI_FUNCTION_DESCRIPTION_END
|
||||
|
||||
;####################Functions#########################
|
||||
|
||||
Function ExtractBinaries
|
||||
File "${BIN}\tor.exe"
|
||||
File "${BIN}\tor-resolve.exe"
|
||||
FunctionEnd
|
||||
|
||||
Function ExtractGEOIP
|
||||
File "${BIN}\geoip"
|
||||
FunctionEnd
|
||||
|
||||
Function ExtractIcon
|
||||
File "${BIN}\tor.ico"
|
||||
FunctionEnd
|
||||
|
||||
Function ExtractSpecs
|
||||
File "..\doc\HACKING"
|
||||
File "..\doc\spec\address-spec.txt"
|
||||
File "..\doc\spec\bridges-spec.txt"
|
||||
File "..\doc\spec\control-spec.txt"
|
||||
File "..\doc\spec\dir-spec.txt"
|
||||
File "..\doc\spec\path-spec.txt"
|
||||
File "..\doc\spec\rend-spec.txt"
|
||||
File "..\doc\spec\socks-extensions.txt"
|
||||
File "..\doc\spec\tor-spec.txt"
|
||||
File "..\doc\spec\version-spec.txt"
|
||||
FunctionEnd
|
||||
|
||||
Function ExtractHTML
|
||||
File "..\doc\tor.html"
|
||||
File "..\doc\torify.html"
|
||||
File "..\doc\tor-resolve.html"
|
||||
File "..\doc\tor-gencert.html"
|
||||
FunctionEnd
|
||||
|
||||
Function ExtractReleaseDocs
|
||||
File "..\README"
|
||||
File "..\ChangeLog"
|
||||
File "..\LICENSE"
|
||||
FunctionEnd
|
||||
|
||||
Function ExtractDocuments
|
||||
SetOutPath "$INSTDIR\Documents"
|
||||
Call ExtractSpecs
|
||||
Call ExtractHTML
|
||||
Call ExtractReleaseDocs
|
||||
FunctionEnd
|
||||
|
||||
Function un.InstallFiles
|
||||
Delete "$DESKTOP\Tor.lnk"
|
||||
Delete "$INSTDIR\tor.exe"
|
||||
Delete "$INSTDIR\tor-resolve.exe"
|
||||
Delete "$INSTDIR\Tor Website.url"
|
||||
Delete "$INSTDIR\torrc"
|
||||
Delete "$INSTDIR\torrc.sample"
|
||||
Delete "$INSTDIR\tor.ico"
|
||||
Delete "$SMSTARTUP\Tor.lnk"
|
||||
Delete "$INSTDIR\Uninstall.exe"
|
||||
Delete "$INSTDIR\geoip"
|
||||
FunctionEnd
|
||||
|
||||
Function un.InstallDirectories
|
||||
${If} $CONFIGDIR == $INSTDIR
|
||||
RMDir /r $CONFIGDIR
|
||||
${EndIf}
|
||||
RMDir /r "$INSTDIR\Documents"
|
||||
RMDir $INSTDIR
|
||||
RMDir /r "$SMPROGRAMS\Tor"
|
||||
RMDir /r "$APPDATA\Tor"
|
||||
FunctionEnd
|
||||
|
||||
Function un.WriteRegistry
|
||||
DeleteRegKey HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\Tor"
|
||||
FunctionEnd
|
||||
|
||||
Function un.InstallPackage
|
||||
Call un.InstallFiles
|
||||
Call un.InstallDirectories
|
||||
Call un.WriteRegistry
|
||||
FunctionEnd
|
||||
|
||||
Function CreateTorLinks
|
||||
CreateDirectory "$SMPROGRAMS\Tor"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Tor.lnk" "$INSTDIR\tor.exe" "" "$INSTDIR\tor.ico"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Torrc.lnk" "Notepad.exe" "$CONFIGDIR\torrc"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Tor Website.lnk" "$INSTDIR\Tor Website.url"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Uninstall.lnk" "$INSTDIR\Uninstall.exe"
|
||||
FunctionEnd
|
||||
|
||||
Function CreateDocLinks
|
||||
CreateDirectory "$SMPROGRAMS\Tor\Documents"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Documentation.lnk" "$INSTDIR\Documents"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Specification.lnk" "$INSTDIR\Documents\tor-spec.txt"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Address Specification.lnk" "$INSTDIR\Documents\address-spec.txt"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Bridges Specification.lnk" "$INSTDIR\Documents\bridges-spec.txt"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Control Specification.lnk" "$INSTDIR\Documents\control-spec.txt"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Directory Specification.lnk" "$INSTDIR\Documents\dir-spec.txt"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Path Specification.lnk" "$INSTDIR\Documents\path-spec.txt"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Rend Specification.lnk" "$INSTDIR\Documents\rend-spec.txt"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Version Specification.lnk" "$INSTDIR\Documents\version-spec.txt"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor SOCKS Extensions.lnk" "$INSTDIR\Documents\socks-extensions.txt"
|
||||
FunctionEnd
|
||||
|
||||
Function ParseCmdLine
|
||||
${GetParameters} $1
|
||||
${If} $1 == "-x" ;Extract All Files
|
||||
StrCpy $INSTDIR $EXEDIR
|
||||
Call ExtractBinaries
|
||||
Call ExtractDocuments
|
||||
Quit
|
||||
${ElseIf} $1 == "-b" ;Extract Binaries Only
|
||||
StrCpy $INSTDIR $EXEDIR
|
||||
Call ExtractBinaries
|
||||
Quit
|
||||
${ElseIf} $1 != ""
|
||||
MessageBox MB_OK|MB_TOPMOST `${Installer} [-x|-b]$\r$\n$\r$\n -x Extract all files$\r$\n -b Extract binary files only`
|
||||
Quit
|
||||
${EndIf}
|
||||
FunctionEnd
|
||||
|
Binary file not shown.
@ -1,214 +0,0 @@
|
||||
;tor.nsi - A basic win32 installer for Tor
|
||||
; Originally written by J Doe.
|
||||
; See LICENSE for licensing information
|
||||
;-----------------------------------------
|
||||
; NOTE: This file might be obsolete. Look at tor-mingw.nsi.in instead.
|
||||
;-----------------------------------------
|
||||
; How to make an installer:
|
||||
; Step 0. If you are a Tor maintainer, make sure that tor.nsi has
|
||||
; the correct version number.
|
||||
; Step 1. Download and install OpenSSL. Make sure that the OpenSSL
|
||||
; version listed below matches the one you downloaded.
|
||||
; Step 2. Download and install NSIS (http://nsis.sourceforge.net)
|
||||
; Step 3. Make a directory under the main tor directory called "bin".
|
||||
; Step 4. Copy ssleay32.dll and libeay32.dll from OpenSSL into "bin".
|
||||
; Step 5. Run man2html on tor.1.in; call the result tor-reference.html
|
||||
; Run man2html on tor-resolve.1; call the result tor-resolve.html
|
||||
; Step 6. Copy torrc.sample.in to torrc.sample.
|
||||
; Step 7. Build tor.exe and tor_resolve.exe; save the result into bin.
|
||||
; Step 8. cd into contrib and run "makensis tor.nsi".
|
||||
;
|
||||
; Problems:
|
||||
; - Copying torrc.sample.in to torrc.sample and tor.1.in (implicitly)
|
||||
; to tor.1 is a Bad Thing, and leaves us with @autoconf@ vars in the final
|
||||
; result.
|
||||
; - Building Tor requires too much windows C clue.
|
||||
; - We should have actual makefiles for VC that do the right thing.
|
||||
; - I need to learn more NSIS juju to solve these:
|
||||
; - There should be a batteries-included installer that comes with
|
||||
; privoxy too. (Check privoxy license on this; be sure to include
|
||||
; all privoxy documents.)
|
||||
; - The filename should probably have a revision number.
|
||||
|
||||
!include "MUI.nsh"
|
||||
|
||||
!define VERSION "0.1.2.3-alpha-dev"
|
||||
!define INSTALLER "tor-${VERSION}-win32.exe"
|
||||
!define WEBSITE "https://www.torproject.org/"
|
||||
|
||||
!define LICENSE "..\LICENSE"
|
||||
;BIN is where it expects to find tor.exe, tor_resolve.exe, libeay32.dll and
|
||||
; ssleay32.dll
|
||||
!define BIN "..\bin"
|
||||
|
||||
SetCompressor lzma
|
||||
;SetCompressor zlib
|
||||
OutFile ${INSTALLER}
|
||||
InstallDir $PROGRAMFILES\Tor
|
||||
SetOverWrite ifnewer
|
||||
|
||||
Name "Tor"
|
||||
Caption "Tor ${VERSION} Setup"
|
||||
BrandingText "The Onion Router"
|
||||
CRCCheck on
|
||||
|
||||
;Use upx on the installer header to shrink the size.
|
||||
!packhdr header.dat "upx --best header.dat"
|
||||
|
||||
!define MUI_WELCOMEPAGE_TITLE "Welcome to the Tor ${VERSION} Setup Wizard"
|
||||
!define MUI_WELCOMEPAGE_TEXT "This wizard will guide you through the installation of Tor ${VERSION}.\r\n\r\nIf you have previously installed Tor and it is currently running, please exit Tor first before continuing this installation.\r\n\r\n$_CLICK"
|
||||
!define MUI_ABORTWARNING
|
||||
!define MUI_ICON "${NSISDIR}\Contrib\Graphics\Icons\win-install.ico"
|
||||
!define MUI_UNICON "${NSISDIR}\Contrib\Graphics\Icons\win-uninstall.ico"
|
||||
!define MUI_HEADERIMAGE_BITMAP "${NSISDIR}\Contrib\Graphics\Header\win.bmp"
|
||||
!define MUI_HEADERIMAGE
|
||||
!define MUI_FINISHPAGE_RUN "$INSTDIR\tor.exe"
|
||||
!define MUI_FINISHPAGE_LINK "Visit the Tor website for the latest updates."
|
||||
!define MUI_FINISHPAGE_LINK_LOCATION ${WEBSITE}
|
||||
|
||||
!insertmacro MUI_PAGE_WELCOME
|
||||
; There's no point in having a clickthrough license: Our license adds
|
||||
; certain rights, but doesn't remove them.
|
||||
; !insertmacro MUI_PAGE_LICENSE "${LICENSE}"
|
||||
!insertmacro MUI_PAGE_COMPONENTS
|
||||
!insertmacro MUI_PAGE_DIRECTORY
|
||||
!insertmacro MUI_PAGE_INSTFILES
|
||||
!insertmacro MUI_PAGE_FINISH
|
||||
!insertmacro MUI_UNPAGE_WELCOME
|
||||
!insertmacro MUI_UNPAGE_CONFIRM
|
||||
!insertmacro MUI_UNPAGE_INSTFILES
|
||||
!insertmacro MUI_UNPAGE_FINISH
|
||||
!insertmacro MUI_LANGUAGE "English"
|
||||
|
||||
Var configdir
|
||||
Var configfile
|
||||
|
||||
;Sections
|
||||
;--------
|
||||
|
||||
Section "Tor" Tor
|
||||
;Files that have to be installed for tor to run and that the user
|
||||
;cannot choose not to install
|
||||
SectionIn RO
|
||||
SetOutPath $INSTDIR
|
||||
File "${BIN}\tor.exe"
|
||||
File "${BIN}\tor_resolve.exe"
|
||||
WriteIniStr "$INSTDIR\Tor Website.url" "InternetShortcut" "URL" ${WEBSITE}
|
||||
|
||||
StrCpy $configfile "torrc"
|
||||
StrCpy $configdir $APPDATA\Tor
|
||||
; ;If $APPDATA isn't valid here (Early win95 releases with no updated
|
||||
; ; shfolder.dll) then we put it in the program directory instead.
|
||||
; StrCmp $APPDATA "" "" +2
|
||||
; StrCpy $configdir $INSTDIR
|
||||
SetOutPath $configdir
|
||||
;If there's already a torrc config file, ask if they want to
|
||||
;overwrite it with the new one.
|
||||
IfFileExists "$configdir\torrc" "" endiftorrc
|
||||
MessageBox MB_ICONQUESTION|MB_YESNO "You already have a Tor config file.$\r$\nDo you want to overwrite it with the default sample config file?" IDNO yesreplace
|
||||
Delete $configdir\torrc
|
||||
Goto endiftorrc
|
||||
yesreplace:
|
||||
StrCpy $configfile "torrc.sample"
|
||||
endiftorrc:
|
||||
File /oname=$configfile "..\src\config\torrc.sample"
|
||||
SectionEnd
|
||||
|
||||
Section "OpenSSL 0.9.8a" OpenSSL
|
||||
SetOutPath $INSTDIR
|
||||
File "${BIN}\libeay32.dll"
|
||||
File "${BIN}\ssleay32.dll"
|
||||
SectionEnd
|
||||
|
||||
Section "Documents" Docs
|
||||
SetOutPath "$INSTDIR\Documents"
|
||||
;File "..\doc\FAQ"
|
||||
File "..\doc\HACKING"
|
||||
File "..\doc\spec\control-spec.txt"
|
||||
File "..\doc\spec\dir-spec.txt"
|
||||
File "..\doc\spec\rend-spec.txt"
|
||||
File "..\doc\spec\socks-extensions.txt"
|
||||
File "..\doc\spec\tor-spec.txt"
|
||||
File "..\doc\spec\version-spec.txt"
|
||||
;
|
||||
; WEBSITE-FILES-HERE
|
||||
;
|
||||
File "..\doc\tor-resolve.html"
|
||||
File "..\doc\tor-reference.html"
|
||||
;
|
||||
File "..\doc\design-paper\tor-design.pdf"
|
||||
;
|
||||
File "..\README"
|
||||
File "..\AUTHORS"
|
||||
File "..\ChangeLog"
|
||||
File "..\LICENSE"
|
||||
SectionEnd
|
||||
|
||||
SubSection /e "Shortcuts" Shortcuts
|
||||
|
||||
Section "Start Menu" StartMenu
|
||||
SetOutPath $INSTDIR
|
||||
IfFileExists "$SMPROGRAMS\Tor\*.*" "" +2
|
||||
RMDir /r "$SMPROGRAMS\Tor"
|
||||
CreateDirectory "$SMPROGRAMS\Tor"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Tor.lnk" "$INSTDIR\tor.exe"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Torrc.lnk" "Notepad.exe" "$configdir\torrc"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Tor Website.lnk" "$INSTDIR\Tor Website.url"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Uninstall.lnk" "$INSTDIR\Uninstall.exe"
|
||||
IfFileExists "$INSTDIR\Documents\*.*" "" endifdocs
|
||||
CreateDirectory "$SMPROGRAMS\Tor\Documents"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Manual.lnk" "$INSTDIR\Documents\tor-reference.html"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Documentation.lnk" "$INSTDIR\Documents"
|
||||
CreateShortCut "$SMPROGRAMS\Tor\Documents\Tor Specification.lnk" "$INSTDIR\Documents\tor-spec.txt"
|
||||
endifdocs:
|
||||
SectionEnd
|
||||
|
||||
Section "Desktop" Desktop
|
||||
SetOutPath $INSTDIR
|
||||
CreateShortCut "$DESKTOP\Tor.lnk" "$INSTDIR\tor.exe"
|
||||
SectionEnd
|
||||
|
||||
Section /o "Run at startup" Startup
|
||||
SetOutPath $INSTDIR
|
||||
CreateShortCut "$SMSTARTUP\Tor.lnk" "$INSTDIR\tor.exe" "" "" 0 SW_SHOWMINIMIZED
|
||||
SectionEnd
|
||||
|
||||
SubSectionEnd
|
||||
|
||||
Section "Uninstall"
|
||||
Delete "$DESKTOP\Tor.lnk"
|
||||
Delete "$INSTDIR\libeay32.dll"
|
||||
Delete "$INSTDIR\ssleay32.dll"
|
||||
Delete "$INSTDIR\tor.exe"
|
||||
Delete "$INSTDIR\tor_resolve.exe"
|
||||
Delete "$INSTDIR\Tor Website.url"
|
||||
Delete "$INSTDIR\torrc"
|
||||
Delete "$INSTDIR\torrc.sample"
|
||||
StrCmp $configdir $INSTDIR +2 ""
|
||||
RMDir /r $configdir
|
||||
Delete "$INSTDIR\Uninstall.exe"
|
||||
RMDir /r "$INSTDIR\Documents"
|
||||
RMDir $INSTDIR
|
||||
RMDir /r "$SMPROGRAMS\Tor"
|
||||
Delete "$SMSTARTUP\Tor.lnk"
|
||||
DeleteRegKey HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\Tor"
|
||||
SectionEnd
|
||||
|
||||
Section -End
|
||||
WriteUninstaller "$INSTDIR\Uninstall.exe"
|
||||
;The registry entries simply add the Tor uninstaller to the Windows
|
||||
;uninstall list.
|
||||
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Tor" "DisplayName" "Tor (remove only)"
|
||||
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Tor" "UninstallString" '"$INSTDIR\Uninstall.exe"'
|
||||
SectionEnd
|
||||
|
||||
!insertmacro MUI_FUNCTION_DESCRIPTION_BEGIN
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${Tor} "The core executable and config files needed for Tor to run."
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${OpenSSL} "OpenSSL libraries required by Tor."
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${Docs} "Documentation about Tor."
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${ShortCuts} "Shortcuts to easily start Tor"
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${StartMenu} "Shortcuts to access Tor and its documentation from the Start Menu"
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${Desktop} "A shortcut to start Tor from the desktop"
|
||||
!insertmacro MUI_DESCRIPTION_TEXT ${Startup} "Launches Tor automatically at startup in a minimized window"
|
||||
!insertmacro MUI_FUNCTION_DESCRIPTION_END
|
||||
|
File diff suppressed because it is too large
@ -1,266 +0,0 @@
|
||||
# A Padding Machine from Scratch
|
||||
|
||||
A quickstart guide by Tobias Pulls.
|
||||
|
||||
This document describes the process of building a "padding machine" in tor's new
|
||||
circuit padding framework from scratch. Notes were taken as part of porting
|
||||
[Adaptive Padding Early
|
||||
(APE)](https://www.cs.kau.se/pulls/hot/thebasketcase-ape/) from basket2 to the
|
||||
circuit padding framework. The goal is just to document the process and provide
|
||||
useful pointers along the way, not create a useful machine.
|
||||
|
||||
The quick and dirty plan is to:
|
||||
1. clone and compile tor
|
||||
2. use the newly built tor in TB and at a small (non-exit) relay we run
|
||||
3. add a bare-bones APE padding machine
|
||||
4. run the machine, inspect logs for activity
|
||||
5. port APE's state machine without thinking much about parameters
|
||||
|
||||
## Clone and compile tor
|
||||
|
||||
```console
|
||||
$ git clone https://gitlab.torproject.org/tpo/core/tor.git
|
||||
$ cd tor
|
||||
$ git checkout tor-0.4.1.5
|
||||
```
|
||||
Above we use the tag for tor-0.4.1.5 where the circuit padding framework was
|
||||
released. Note that this version of the framework is missing many features and
|
||||
fixes that have since been merged to origin/master. If you need the newest
|
||||
framework features, you should use master instead.
|
||||
|
||||
```console
|
||||
$ sh autogen.sh
|
||||
$ ./configure
|
||||
$ make
|
||||
```
|
||||
When you run `./configure` you'll be told of missing dependencies and packages
|
||||
to install on debian-based distributions. Important: if you plan to run `tor` on
|
||||
a relay as part of the real Tor network and your server runs a distribution that
|
||||
uses systemd, then I'd recommend that you `apt install dpkg dpkg-dev
|
||||
libevent-dev libssl-dev asciidoc quilt dh-apparmor libseccomp-dev dh-systemd
|
||||
libsystemd-dev pkg-config dh-autoreconf libfakeroot zlib1g zlib1g-dev automake
|
||||
liblzma-dev libzstd-dev` and ensure that tor has systemd support enabled:
|
||||
`./configure --enable-systemd`. Without this, on a recent Ubuntu, my tor service
|
||||
was forcefully restarted (SIGINT interrupt) by systemd every five minutes.
|
||||
|
||||
If you want to install on your local system, run `make install`. In our case we
|
||||
just want the tor binary at `src/app/tor`.
|
||||
|
||||
## Use tor in TB and at a relay
|
||||
|
||||
Download and install a fresh Tor Browser (TB) from torproject.org. Make sure it
|
||||
works. From the command line, relative to the folder created when you extracted
|
||||
TB, run `./Browser/start-tor-browser --verbose` to get some basic log output.
|
||||
Note the version of tor, in my case, `Tor 0.4.0.5 (git-bf071e34aa26e096)` as
|
||||
part of TB 8.5.4. Shut down TB, copy the `tor` binary that you compiled earlier
|
||||
and replace `Browser/TorBrowser/Tor/tor`. Start TB from the command line again,
|
||||
you should see a different version, in my case `Tor 0.4.1.5
|
||||
(git-439ca48989ece545)`.
|
||||
|
||||
The relay we run is also on linux, and `tor` is located at `/usr/bin/tor`. To
|
||||
view relevant logs since last boot `sudo journalctl -b /usr/bin/tor`, where we
|
||||
find `Tor 0.4.0.5 running on Linux`. Copy the locally compiled `tor` to the
|
||||
relay at a temporary location and then make sure its ownership and access
|
||||
rights are identical to `/usr/bin/tor`. Next, shut down the running tor service
|
||||
with `sudo service tor stop`, wait for it to stop (typically 30s), copy our
|
||||
locally compiled tor to replace `/usr/bin/tor` then start the service again.
|
||||
Checking the logs we see `Tor 0.4.1.5 (git-439ca48989ece545)`.
|
||||
|
||||
Repeatedly shutting down a relay is detrimental to the network and should be
|
||||
avoided. Sorry about that.
|
||||
|
||||
We have one more step left before we move on to the machine: configure TB to always
|
||||
use our middle relay. Edit `Browser/TorBrowser/Data/Tor/torrc` and set
|
||||
`MiddleNodes <fingerprint>`, where `<fingerprint>` is the fingerprint of the
|
||||
relay. Start TB, visit a website, and manually confirm that the middle is used
|
||||
by looking at the circuit display.
|
||||
|
||||
## Add a bare-bones APE padding machine
|
||||
|
||||
Now the fun part. We have several resources at our disposal (mind that links
|
||||
might be broken in the future, just search for the headings):
|
||||
- The official [Circuit Padding Developer
|
||||
Documentation](https://storm.torproject.org/shared/ChieH_sLU93313A2gopZYT3x2waJ41hz5Hn2uG1Uuh7).
|
||||
- Notes we made on the [implementation of the circuit padding
|
||||
framework](https://github.com/pylls/padding-machines-for-tor/blob/master/notes/circuit-padding-framework.md).
|
||||
- The implementation of the current circuit padding machines in tor:
|
||||
[circuitpadding.c](https://gitweb.torproject.org/tor.git/tree/src/core/or/circuitpadding_machines.c)
|
||||
and
|
||||
[circuitpadding_machines.h](https://gitweb.torproject.org/tor.git/tree/src/core/or/circuitpadding_machines.h).
|
||||
|
||||
Please consult the above links for details. Moving forward, the focus is to
|
||||
describe what was done, not necessarily explaining all the details why.
|
||||
|
||||
Since we plan to make changes to tor, create a new branch `git checkout -b
|
||||
circuit-padding-ape-machine tor-0.4.1.5`.
|
||||
|
||||
We start with declaring two functions, one for the machine at the client and one
|
||||
at the relay, in `circuitpadding_machines.h`:
|
||||
|
||||
```c
|
||||
void circpad_machine_relay_wf_ape(smartlist_t *machines_sl);
|
||||
void circpad_machine_client_wf_ape(smartlist_t *machines_sl);
|
||||
```
|
||||
|
||||
The definitions go into `circuitpadding_machines.c`:
|
||||
|
||||
```c
|
||||
/**************** Adaptive Padding Early (APE) machine ****************/
|
||||
|
||||
/**
|
||||
* Create a relay-side padding machine based on the APE design.
|
||||
*/
|
||||
void
|
||||
circpad_machine_relay_wf_ape(smartlist_t *machines_sl)
|
||||
{
|
||||
circpad_machine_spec_t *relay_machine
|
||||
= tor_malloc_zero(sizeof(circpad_machine_spec_t));
|
||||
|
||||
relay_machine->name = "relay_wf_ape";
|
||||
relay_machine->is_origin_side = 0; // relay-side
|
||||
|
||||
// Pad to/from the middle relay, only when the circuit has streams
|
||||
relay_machine->target_hopnum = 2;
|
||||
relay_machine->conditions.min_hops = 2;
|
||||
relay_machine->conditions.state_mask = CIRCPAD_CIRC_STREAMS;
|
||||
|
||||
// limits to help guard against excessive padding
|
||||
relay_machine->allowed_padding_count = 1;
|
||||
relay_machine->max_padding_percent = 1;
|
||||
|
||||
// one state to start with: START (-> END, never takes a slot in states)
|
||||
circpad_machine_states_init(relay_machine, 1);
|
||||
relay_machine->states[CIRCPAD_STATE_START].
|
||||
next_state[CIRCPAD_EVENT_NONPADDING_SENT] =
|
||||
CIRCPAD_STATE_END;
|
||||
|
||||
// register the machine
|
||||
relay_machine->machine_num = smartlist_len(machines_sl);
|
||||
circpad_register_padding_machine(relay_machine, machines_sl);
|
||||
|
||||
log_info(LD_CIRC,
|
||||
"Registered relay WF APE padding machine (%u)",
|
||||
relay_machine->machine_num);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a client-side padding machine based on the APE design.
|
||||
*/
|
||||
void
|
||||
circpad_machine_client_wf_ape(smartlist_t *machines_sl)
|
||||
{
|
||||
circpad_machine_spec_t *client_machine
|
||||
= tor_malloc_zero(sizeof(circpad_machine_spec_t));
|
||||
|
||||
client_machine->name = "client_wf_ape";
|
||||
client_machine->is_origin_side = 1; // client-side
|
||||
|
||||
/** Pad to/from the middle relay, only when the circuit has streams, and only
|
||||
* for general purpose circuits (typical for web browsing)
|
||||
*/
|
||||
client_machine->target_hopnum = 2;
|
||||
client_machine->conditions.min_hops = 2;
|
||||
client_machine->conditions.state_mask = CIRCPAD_CIRC_STREAMS;
|
||||
client_machine->conditions.purpose_mask =
|
||||
circpad_circ_purpose_to_mask(CIRCUIT_PURPOSE_C_GENERAL);
|
||||
|
||||
// limits to help guard against excessive padding
|
||||
client_machine->allowed_padding_count = 1;
|
||||
client_machine->max_padding_percent = 1;
|
||||
|
||||
// one state to start with: START (-> END, never takes a slot in states)
|
||||
circpad_machine_states_init(client_machine, 1);
|
||||
client_machine->states[CIRCPAD_STATE_START].
|
||||
next_state[CIRCPAD_EVENT_NONPADDING_SENT] =
|
||||
CIRCPAD_STATE_END;
|
||||
|
||||
client_machine->machine_num = smartlist_len(machines_sl);
|
||||
circpad_register_padding_machine(client_machine, machines_sl);
|
||||
log_info(LD_CIRC,
|
||||
"Registered client WF APE padding machine (%u)",
|
||||
client_machine->machine_num);
|
||||
}
|
||||
```
|
||||
|
||||
We also have to modify `circpad_machines_init()` in `circuitpadding.c` to
|
||||
register our machines:
|
||||
|
||||
```c
|
||||
/* Register machines for the APE WF defense */
|
||||
circpad_machine_client_wf_ape(origin_padding_machines);
|
||||
circpad_machine_relay_wf_ape(relay_padding_machines);
|
||||
```
|
||||
|
||||
We run `make` to get a new `tor` binary and copy it to our local TB.
|
||||
|
||||
## Run the machine
|
||||
|
||||
To be able
|
||||
to view circuit info events in the console as we launch TB, we add `Log
|
||||
[circ]info notice stdout` to TB's `torrc`.
|
||||
|
||||
Running TB to visit example.com we first find in the log:
|
||||
|
||||
```
|
||||
Aug 30 18:36:43.000 [info] circpad_machine_client_hide_intro_circuits(): Registered client intro point hiding padding machine (0)
|
||||
Aug 30 18:36:43.000 [info] circpad_machine_relay_hide_intro_circuits(): Registered relay intro circuit hiding padding machine (0)
|
||||
Aug 30 18:36:43.000 [info] circpad_machine_client_hide_rend_circuits(): Registered client rendezvous circuit hiding padding machine (1)
|
||||
Aug 30 18:36:43.000 [info] circpad_machine_relay_hide_rend_circuits(): Registered relay rendezvous circuit hiding padding machine (1)
|
||||
Aug 30 18:36:43.000 [info] circpad_machine_client_wf_ape(): Registered client WF APE padding machine (2)
|
||||
Aug 30 18:36:43.000 [info] circpad_machine_relay_wf_ape(): Registered relay WF APE padding machine (2)
|
||||
```
|
||||
|
||||
All good, our machine is running. Looking further we find:
|
||||
|
||||
```
|
||||
Aug 30 18:36:55.000 [info] circpad_setup_machine_on_circ(): Registering machine client_wf_ape to origin circ 2 (5)
|
||||
Aug 30 18:36:55.000 [info] circpad_node_supports_padding(): Checking padding: supported
|
||||
Aug 30 18:36:55.000 [info] circpad_negotiate_padding(): Negotiating padding on circuit 2 (5), command 2
|
||||
Aug 30 18:36:55.000 [info] circpad_machine_spec_transition(): Circuit 2 circpad machine 0 transitioning from 0 to 65535
|
||||
Aug 30 18:36:55.000 [info] circpad_machine_spec_transitioned_to_end(): Padding machine in end state on circuit 2 (5)
|
||||
Aug 30 18:36:55.000 [info] circpad_circuit_machineinfo_free_idx(): Freeing padding info idx 0 on circuit 2 (5)
|
||||
Aug 30 18:36:55.000 [info] circpad_handle_padding_negotiated(): Middle node did not accept our padding request on circuit 2 (5)
|
||||
```
|
||||
We see that our middle supports padding (since we upgraded to tor-0.4.1.5), that
|
||||
we attempt to negotiate, our machine starts on the client, transitions to the
|
||||
end state, and is freed. The last line shows that the middle doesn't have a
|
||||
padding machine that can run.
|
||||
|
||||
Next, we follow the same steps as earlier and replace the modified `tor` at our
|
||||
middle relay. We don't update the logging there to avoid logging on the info
|
||||
level on the live network. Looking at the client log again we see that
|
||||
negotiation works as before except for the last line: it's missing, so the
|
||||
machine is running at the middle as well.
|
||||
|
||||
## Implementing the APE state machine
|
||||
|
||||
Porting is fairly straightforward: define the states for all machines, add two
|
||||
more machines (for the receive portion of WTF-PAD, beyond AP), and pick
|
||||
reasonable parameters for the distributions (I completely winged it now, as when
|
||||
implementing APE). The [circuit-padding-ape-machine
|
||||
branch](https://github.com/pylls/tor/tree/circuit-padding-ape-machine) contains
|
||||
the commits for the full machines with plenty of comments.
|
||||
|
||||
Some comments on the process:
|
||||
|
||||
- `tor-0.4.1.5` did not support two machines on the same circuit; the following
|
||||
fix had to be made: https://bugs.torproject.org/tpo/core/tor/31111 .
|
||||
The good news is that everything else seems to work after the small change in
|
||||
the fix.
|
||||
- APE randomizes its distributions. Currently, this can only be done during
|
||||
start of `tor`. This makes sense in the censorship circumvention setting
|
||||
(`obfs4`), less so for WF defenses: further randomizing each circuit is likely
|
||||
a PITA for attackers with few downsides.
|
||||
- it was annoying to figure out that the lack of systemd support in my compiled
|
||||
tor caused systemd to interrupt (SIGINT) my tor process at the middle relay
|
||||
every five minutes. Updated build steps above to hopefully save others the
|
||||
pain.
|
||||
- there's for sure some bug on relays when sending padding cells too early (?).
|
||||
It can happen with some probability with the APE implementation due to
|
||||
`circpad_machine_relay_wf_ape_send()`. Will investigate next.
|
||||
- Moving the registration of machines from the definition of the machines to
|
||||
`circpad_machines_init()` makes sense, as suggested in the circuit padding doc
|
||||
draft.
|
||||
|
||||
Remember that APE is just a proof-of-concept and we make zero claims about its
|
||||
ability to withstand WF attacks, in particular those based on deep learning.
|
@ -1,543 +0,0 @@
|
||||
# Coding conventions for Tor
|
||||
|
||||
tl;dr:
|
||||
|
||||
- Run configure with `--enable-fatal-warnings`
|
||||
- Document your functions
|
||||
- Write unit tests
|
||||
- Run `make check` before submitting a patch
|
||||
- Run `make distcheck` if you have made changes to build system components
|
||||
- Add a file in `changes` for your branch.
|
||||
|
||||
## Patch checklist
|
||||
|
||||
If possible, send your patch as one of these (in descending order of
|
||||
preference)
|
||||
|
||||
- A git branch we can pull from
|
||||
- Patches generated by git format-patch
|
||||
- A unified diff
|
||||
|
||||
Did you remember...
|
||||
|
||||
- To build your code while configured with `--enable-fatal-warnings`?
|
||||
- To run `make check-docs` to see whether all new options are on
|
||||
the manpage?
|
||||
- To write unit tests, where possible?
|
||||
- To run `make test-full` to test against all unit and integration tests (or
|
||||
`make test-full-online` if you have a working connection to the internet)?
|
||||
- To test that the distribution will actually work via `make distcheck`?
|
||||
- To base your code on the appropriate branch?
|
||||
- To include a file in the `changes` directory as appropriate?
|
||||
|
||||
If you are submitting a major patch or new feature, or want to in the future...
|
||||
|
||||
- Set up Chutney and Stem, see `doc/HACKING/WritingTests.md`
|
||||
- Run `make test-full` to test against all unit and integration tests.
|
||||
|
||||
If you have changed build system components:
|
||||
- Please run `make distcheck`
|
||||
- For example, if you have changed Makefiles, autoconf files, or anything
|
||||
else that affects the build system.
|
||||
|
||||
## License issues
|
||||
|
||||
Tor is distributed under the license terms in the LICENSE -- in
|
||||
brief, the "3-clause BSD license". If you send us code to
|
||||
distribute with Tor, it needs to be code that we can distribute
|
||||
under those terms. Please don't send us patches unless you agree
|
||||
to allow this.
|
||||
|
||||
Some compatible licenses include:
|
||||
|
||||
- 3-clause BSD
|
||||
- 2-clause BSD
|
||||
- CC0 Public Domain Dedication
|
||||
|
||||
## How we use Git branches
|
||||
|
||||
Each main development series (like 0.2.1, 0.2.2, etc) has its main work
|
||||
applied to a single branch. At most one series can be the development series
|
||||
at a time; all other series are maintenance series that get bug-fixes only.
|
||||
The development series is built in a git branch called "main"; the
|
||||
maintenance series are built in branches called "maint-0.2.0", "maint-0.2.1",
|
||||
and so on. We regularly merge the active maint branches forward.
|
||||
|
||||
For all series except the development series, we also have a "release" branch
|
||||
(as in "release-0.2.1"). The release series is based on the corresponding
|
||||
maintenance series, except that it deliberately lags the maint series for
|
||||
most of its patches, so that bugfix patches are not typically included in a
|
||||
maintenance release until they've been tested for a while in a development
|
||||
release. Occasionally, we'll merge an urgent bugfix into the release branch
|
||||
before it gets merged into maint, but that's rare.
|
||||
|
||||
If you're working on a bugfix for a bug that occurs in a particular version,
|
||||
base your bugfix branch on the "maint" branch for the first supported series
|
||||
that has that bug. (As of June 2013, we're supporting 0.2.3 and later.)
|
||||
|
||||
If you're working on a new feature, base it on the main branch. If you're
|
||||
working on a new feature and it will take a while to implement and/or you'd
|
||||
like to avoid the possibility of unrelated bugs in Tor while you're
|
||||
implementing your feature, consider branching off of the latest maint- branch.
|
||||
_Never_ branch off a release- branch. Don't branch off a tag either: they come
|
||||
from release branches. Doing so will likely produce a nightmare of merge
|
||||
conflicts in the ChangeLog when it comes time to merge your branch into Tor.
|
||||
Best advice: don't try to keep an independent branch forked for more than 6
|
||||
months and expect it to merge cleanly. Try to merge pieces early and often.
|
||||
|
||||
## How we log changes
|
||||
|
||||
When you do a commit that needs a ChangeLog entry, add a new file to
|
||||
the `changes` toplevel subdirectory. It should have the format of a
|
||||
one-entry changelog section from the current ChangeLog file, as in
|
||||
|
||||
o Major bugfixes (security):
|
||||
- Fix a potential buffer overflow. Fixes bug 99999; bugfix on
|
||||
0.3.1.4-beta.
|
||||
o Minor features (performance):
|
||||
- Make tor faster. Closes ticket 88888.
|
||||
|
||||
To write a changes file, first categorize the change. Some common categories
|
||||
are:
|
||||
o Minor bugfixes (subheading):
|
||||
o Major bugfixes (subheading):
|
||||
o Minor features (subheading):
|
||||
o Major features (subheading):
|
||||
o Code simplifications and refactoring:
|
||||
o Testing:
|
||||
o Documentation:
|
||||
|
||||
The subheading is a particular area within Tor. See the ChangeLog for
|
||||
examples.
|
||||
|
||||
Then say what the change does. If it's a bugfix, mention what bug it fixes
|
||||
and when the bug was introduced. To find out which Git tag the change was
|
||||
introduced in, you can use `git describe --contains <sha1 of commit>`.
|
||||
If you don't know the commit, you can search the git diffs (-S) for the first
|
||||
instance of the feature (--reverse).
|
||||
|
||||
For example, for #30224, we wanted to know when the bridge-distribution-request
|
||||
feature was introduced into Tor:
|
||||
|
||||
```console
|
||||
$ git log -S bridge-distribution-request --reverse
commit ebab521525
|
||||
Author: Roger Dingledine <arma@torproject.org>
|
||||
Date: Sun Nov 13 02:39:16 2016 -0500
|
||||
|
||||
Add new BridgeDistribution config option
|
||||
|
||||
$ git describe --contains ebab521525
|
||||
tor-0.3.2.3-alpha~15^2~4
|
||||
```
|
||||
|
||||
If you need to know all the Tor versions that contain a commit, use:
|
||||
|
||||
```console
|
||||
$ git tag --contains 9f2efd02a1 | sort -V
|
||||
tor-0.2.5.16
|
||||
tor-0.2.8.17
|
||||
tor-0.2.9.14
|
||||
tor-0.2.9.15
|
||||
...
|
||||
tor-0.3.0.13
|
||||
tor-0.3.1.9
|
||||
tor-0.3.1.10
|
||||
...
|
||||
```
|
||||
|
||||
If a bug was introduced before the oldest currently supported release series
|
||||
of Tor, and it's hard to track down where it was introduced, you may say
|
||||
"bugfix on all supported versions of Tor."
|
||||
|
||||
If at all possible, try to create the changes file in the same commit where
|
||||
you are making the change. Please give it a distinctive name that no other
|
||||
branch will use for the lifetime of your change. We usually use "ticketNNNNN"
|
||||
or "bugNNNNN", where NNNNN is the ticket number. To verify the format of the
|
||||
changes file, you can use `make check-changes`. This is run automatically as
|
||||
part of `make check` -- if it fails, we must fix it as soon as possible, so
|
||||
that our CI passes. These checks are implemented in
|
||||
`scripts/maint/lintChanges.py`.
|
||||
|
||||
Changes file style guide:
|
||||
* Make everything terse.
|
||||
|
||||
* Write from the user's point of view: describe the user-visible changes
|
||||
right away.
|
||||
|
||||
* Mention configuration options by name. If they're rare or unusual,
|
||||
remind people what they're for.
|
||||
|
||||
* Describe changes in the present tense and in the imperative: not past.
|
||||
|
||||
* Every bugfix should have a sentence of the form "Fixes bug 1234; bugfix
|
||||
on 0.1.2.3-alpha", describing what bug was fixed and where it came from.
|
||||
|
||||
* "Relays", not "servers", "nodes", or "Tor relays".
|
||||
|
||||
When we go to make a release, we will concatenate all the entries
|
||||
in changes to make a draft changelog, and clear the directory. We'll
|
||||
then edit the draft changelog into a nice readable format.
|
||||
|
||||
What needs a changes file?
|
||||
|
||||
* A not-exhaustive list: Anything that might change user-visible
|
||||
behavior. Anything that changes internals, documentation, or the build
|
||||
system enough that somebody could notice. Big or interesting code
|
||||
rewrites. Anything about which somebody might plausibly wonder "when
|
||||
did that happen, and/or why did we do that" 6 months down the line.
|
||||
|
||||
What does not need a changes file?
|
||||
|
||||
* Bugfixes for code that hasn't shipped in any released version of Tor
|
||||
* Any change to a file that is not distributed in the tarball. This
|
||||
includes:
|
||||
* Any change to our CI configuration that does not affect the distributed
|
||||
source.
|
||||
* Any change to developer-only tools, unless those tools are distributed
|
||||
in the tarball.
|
||||
* Non-functional code movement.
|
||||
* Identifier re-namings, comment edits, spelling fixes, and so on.
|
||||
|
||||
Why use changes files instead of Git commit messages?
|
||||
|
||||
* Git commit messages are written for developers, not users, and they
|
||||
are nigh-impossible to revise after the fact.
|
||||
|
||||
Why use changes files instead of entries in the ChangeLog?
|
||||
|
||||
* Having every single commit touch the ChangeLog file tended to create
|
||||
zillions of merge conflicts.
|
||||
|
||||
## Whitespace and C conformance
|
||||
|
||||
Tor's C code is written in accordance with the C99 standard. Invoke `make
|
||||
check-spaces` from time to time, so it can tell you about deviations from our C
|
||||
whitespace style. Generally, we use:
|
||||
|
||||
- Unix-style line endings
|
||||
- K&R-style indentation
|
||||
- No space before newlines
|
||||
- Never more than one blank line in a row
|
||||
- Always spaces, never tabs
|
||||
- No more than 79 columns per line.
|
||||
- Two spaces per indent.
|
||||
- A space between control keywords and their corresponding paren
|
||||
`if (x)`, `while (x)`, and `switch (x)`, never `if(x)`, `while(x)`, or
|
||||
`switch(x)`.
|
||||
- A space between anything and an open brace.
|
||||
- No space between a function name and an opening paren. `puts(x)`, not
|
||||
`puts (x)`.
|
||||
- Function declarations at the start of the line.
|
||||
- Use `void foo(void)` to declare a function with no arguments. Saying
|
||||
`void foo()` is C++ syntax.
|
||||
- Use `const` for new APIs.
|
||||
- Variables should be initialized when declared, rather than declared at the
|
||||
top of a scope.
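
As a small illustration (not taken from the Tor source), a helper written in
this style looks like:

```c
/* Count how many of the first n values are positive. */
static int
count_positive(const int *vals, int n)
{
  int count = 0;
  for (int i = 0; i < n; ++i) {
    if (vals[i] > 0)
      ++count;
  }
  return count;
}
```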
|
||||
|
||||
If you use an editor that has plugins for editorconfig.org, the file
|
||||
`.editorconfig` will help you to conform this coding style.
|
||||
|
||||
We try hard to build without warnings everywhere. In particular, if
|
||||
you're using gcc, you should invoke the configure script with the
|
||||
option `--enable-fatal-warnings`. This will tell the compiler
|
||||
to make all warnings into errors.
|
||||
|
||||
## Functions to use; functions not to use
|
||||
|
||||
We have some wrapper functions like `tor_malloc`, `tor_free`, `tor_strdup`, and
|
||||
`tor_gettimeofday`; use them instead of their generic equivalents. (They
|
||||
always succeed or exit.)
|
||||
|
||||
Specifically, don't use `malloc`, `realloc`, `calloc`, `free`, or
|
||||
`strdup`. Use `tor_malloc`, `tor_realloc`, `tor_calloc`, `tor_free`, or
|
||||
`tor_strdup`.
|
||||
|
||||
Don't use `tor_realloc(x, y*z)`. Use `tor_reallocarray(x, y, z)` instead.
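
A minimal sketch of the wrappers in use (the variable names here are
illustrative, not from the Tor source):

```c
size_t n_rows = 8;
char **rows = tor_calloc(n_rows, sizeof(char *)); /* zeroed; never NULL */
/* tor_reallocarray() checks the count*size multiplication for overflow. */
rows = tor_reallocarray(rows, n_rows * 2, sizeof(char *));
rows[0] = tor_strdup("first");
tor_free(rows[0]); /* tor_free() also sets its argument to NULL */
tor_free(rows);
```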
|
||||
|
||||
You can get a full list of the compatibility functions that Tor provides by
|
||||
looking through `src/lib/*/*.h`. You can see the
|
||||
available containers in `src/lib/containers/*.h`. You should probably
|
||||
familiarize yourself with these modules before you write too much code, or
|
||||
else you'll wind up reinventing the wheel.
|
||||
|
||||
We don't use `strcat`, `strcpy`, `sprintf`, or any of those notoriously
|
||||
broken old C functions. We also avoid `strncat` and `strncpy`. Use
|
||||
`strlcat`, `strlcpy`, or `tor_snprintf/tor_asprintf` instead.
|
||||
|
||||
We don't call `memcmp()` directly. Use `fast_memeq()`, `fast_memneq()`,
|
||||
`tor_memeq()`, or `tor_memneq()` for most purposes. If you really need a
|
||||
tristate return value, use `tor_memcmp()` or `fast_memcmp()`.
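
For instance (a sketch; `digest_a`, `digest_b`, and `buf` stand in for whatever
you are comparing):

```c
if (tor_memeq(digest_a, digest_b, DIGEST_LEN)) {
  /* Equal. tor_memeq() runs in data-independent time, so it is safe
   * for secrets such as digests and keys. */
}
if (fast_memneq(buf, "\x05\x00", 2)) {
  /* Not equal. The fast_* variants are fine for non-sensitive data. */
}
```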
|
||||
|
||||
Don't call `assert()` directly. For hard asserts, use `tor_assert()`. For
|
||||
soft asserts, use `tor_assert_nonfatal()` or `BUG()`. If you need to print
|
||||
debug information in assert error message, consider using `tor_assertf()` and
|
||||
`tor_assertf_nonfatal()`. If you are writing code that is too low-level to
|
||||
use the logging subsystem, use `raw_assert()`.
|
||||
|
||||
Don't use `toupper()` and `tolower()` functions. Use `TOR_TOUPPER` and
|
||||
`TOR_TOLOWER` macros instead. Similarly, use `TOR_ISALPHA`, `TOR_ISALNUM` et
|
||||
al. instead of `isalpha()`, `isalnum()`, etc.
|
||||
|
||||
When allocating a new string to be added to a smartlist, use
|
||||
`smartlist_add_asprintf()` to do both at once.
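
For example (a sketch, not from the Tor source):

```c
smartlist_t *lines = smartlist_new();
smartlist_add_asprintf(lines, "%s %d", "SocksPort", 9050); /* alloc + add */
/* ... use the list ... */
SMARTLIST_FOREACH(lines, char *, cp, tor_free(cp));
smartlist_free(lines);
```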
|
||||
|
||||
Avoid calling BSD socket functions directly. Use portable wrappers to work
|
||||
with sockets and socket addresses. Also, sockets should be of type
|
||||
`tor_socket_t`.
|
||||
|
||||
Don't use any of these functions: they aren't portable. Use the
|
||||
version prefixed with `tor_` instead: strtok_r, memmem, memstr,
|
||||
asprintf, localtime_r, gmtime_r, inet_aton, inet_ntop, inet_pton,
|
||||
getpass, ntohll, htonll. (This list is incomplete.)
|
||||
|
||||
## What code can use what other code?
|
||||
|
||||
We're trying to simplify Tor's structure over time. In the long run, we want
|
||||
Tor to be structured as a set of modules with *no circular dependencies*.
|
||||
|
||||
This property is currently provided by the modules in src/lib, but not
|
||||
throughout the rest of Tor. In general, higher-level libraries may use
|
||||
lower-level libraries, but never the reverse.
|
||||
|
||||
To prevent new circular dependencies from landing, we have a tool that
|
||||
you can invoke with `make check-includes`, and which is run
|
||||
automatically as part of `make check`. This tool will verify that, for
|
||||
every source directory with a `.may_include` file, no local headers are
|
||||
included except those specifically permitted by the `.may_include` file.
|
||||
When editing one of these files, please make sure that you are not
|
||||
introducing any cycles into Tor's dependency graph.
|
||||
|
||||
## Floating point math is hard
|
||||
|
||||
Floating point arithmetic as typically implemented by computers is
|
||||
very counterintuitive. Failure to adequately analyze floating point
|
||||
usage can result in surprising behavior and even security
|
||||
vulnerabilities!
|
||||
|
||||
General advice:
|
||||
|
||||
- Don't use floating point.
|
||||
- If you must use floating point, document how the limits of
|
||||
floating point precision and calculation accuracy affect function
|
||||
outputs.
|
||||
- Try to do as much as possible of your calculations using integers
|
||||
(possibly acting as fixed-point numbers) and convert to floating
|
||||
point for display.
|
||||
- If you must send floating point numbers on the wire, serialize
|
||||
them in a platform-independent way. Tor avoids exchanging
|
||||
floating-point values, but when it does, it uses ASCII numerals,
|
||||
with a decimal point (".").
|
||||
- Binary fractions behave very differently from decimal fractions.
|
||||
Make sure you understand how these differences affect your
|
||||
calculations.
|
||||
- Every floating point arithmetic operation is an opportunity to
|
||||
lose precision, overflow, underflow, or otherwise produce
|
||||
undesired results. Addition and subtraction tend to be worse
|
||||
than multiplication and division (due to things like catastrophic
|
||||
cancellation). Try to arrange your calculations to minimize such
|
||||
effects.
|
||||
- Changing the order of operations changes the results of many
|
||||
floating-point calculations. Be careful when you simplify
|
||||
calculations! If the order is significant, document it using a
|
||||
code comment.
|
||||
- Comparing most floating point values for equality is unreliable.
|
||||
Avoid using `==`; instead, use `>=` or `<=`. If you use an
|
||||
epsilon value, make sure it's appropriate for the ranges in
|
||||
question (see the sketch after this list).
|
||||
- Different environments (including compiler flags and per-thread
|
||||
state on a single platform!) can get different results from the
|
||||
same floating point calculations. This means you can't use
|
||||
floats in anything that needs to be deterministic, like consensus
|
||||
generation. This also makes reliable unit tests of
|
||||
floating-point outputs hard to write.
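
As a sketch of the tolerance-based comparison mentioned above (the epsilon here
is only illustrative; pick one that matches the range of your values):

```c
#include <math.h>

static int
doubles_roughly_equal(double a, double b)
{
  const double eps = 1e-9;
  return fabs(a - b) <= eps * fmax(1.0, fmax(fabs(a), fabs(b)));
}
```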
|
||||
|
||||
For additional useful advice (and a little bit of background), see
|
||||
[What Every Programmer Should Know About Floating-Point
|
||||
Arithmetic](https://floating-point-gui.de/).
|
||||
|
||||
A list of notable (and surprising) facts about floating point
|
||||
arithmetic is at [Floating-point
|
||||
complexities](https://randomascii.wordpress.com/2012/04/05/floating-point-complexities/).
|
||||
Most of that [series of posts on floating
|
||||
point](https://randomascii.wordpress.com/category/floating-point/) is
|
||||
helpful.
|
||||
|
||||
For more detailed (and math-intensive) background, see [What Every
|
||||
Computer Scientist Should Know About Floating-Point
|
||||
Arithmetic](https://docs.oracle.com/cd/E19957-01/806-3568/ncg_goldberg.html).
|
||||
|
||||
## Other C conventions
|
||||
|
||||
The `a ? b : c` trinary operator only goes inside other expressions;
|
||||
don't use it as a replacement for if. (You can ignore this inside macro
|
||||
definitions when necessary.)
|
||||
|
||||
Assignment operators shouldn't nest inside other expressions. (You can
|
||||
ignore this inside macro definitions when necessary.)
|
||||
|
||||
## Binary data and wire formats
|
||||
|
||||
Use a pointer to `char` when representing a NUL-terminated string. To represent
|
||||
arbitrary binary data, use a pointer to `uint8_t`. (Many older Tor APIs ignore
|
||||
this rule.)
|
||||
|
||||
Refrain from attempting to encode integers by casting their pointers to byte
|
||||
arrays. Use something like `set_uint32()`/`get_uint32()` instead and don't
|
||||
forget about endianness.
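
A sketch of what that looks like (the buffer and value are illustrative):

```c
uint8_t buf[4];
uint32_t value = 443;
set_uint32(buf, htonl(value));          /* write in network byte order */
uint32_t back = ntohl(get_uint32(buf)); /* read it back out */
```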
|
||||
|
||||
Try to never hand-write new code to parse or generate binary
|
||||
formats. Instead, use trunnel if at all possible. See
|
||||
|
||||
https://gitweb.torproject.org/trunnel.git/tree
|
||||
|
||||
for more information about trunnel.
|
||||
|
||||
For information on adding new trunnel code to Tor, see src/trunnel/README
|
||||
|
||||
## Calling and naming conventions
|
||||
|
||||
Whenever possible, functions should return -1 on error and 0 on success.
|
||||
|
||||
For multi-word identifiers, use lowercase words combined with
|
||||
underscores (e.g., `multi_word_identifier`). Use ALL_CAPS for macros and
|
||||
constants.
|
||||
|
||||
Typenames should end with `_t`.
|
||||
|
||||
Function names should be prefixed with a module name or object name. (In
|
||||
general, code to manipulate an object should be a module with the same name
|
||||
as the object, so the two conventions usually amount to the same thing.)
|
||||
|
||||
Functions that do things should have imperative-verb names
|
||||
(e.g. `buffer_clear`, `buffer_resize`); functions that return booleans should
|
||||
have predicate names (e.g. `buffer_is_empty`, `buffer_needs_resizing`).
|
||||
|
||||
If you find that you have four or more possible return code values, it's
|
||||
probably time to create an enum. If you find that you are passing three or
|
||||
more flags to a function, it's probably time to create a flags argument that
|
||||
takes a bitfield.
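
For example, a sketch of the conventions above (every name here is invented
for illustration):

```c
/* Typenames end in _t; multi-word identifiers use underscores. */
typedef struct widget_t widget_t;

/* With four or more possible outcomes, an enum beats magic numbers. */
typedef enum widget_load_status_t {
  WIDGET_LOAD_OK = 0,
  WIDGET_LOAD_MISSING,
  WIDGET_LOAD_CORRUPT,
  WIDGET_LOAD_TOO_BIG,
} widget_load_status_t;

/* With three or more boolean options, a flags bitfield beats a row of ints. */
#define WIDGET_FL_VERBOSE  (1u << 0)
#define WIDGET_FL_READONLY (1u << 1)
#define WIDGET_FL_NO_CACHE (1u << 2)

/* Imperative-verb name for a function that does something... */
widget_load_status_t widget_load(widget_t *w, const char *fname,
                                 unsigned flags);
/* ...and a predicate name for one that answers a yes/no question. */
int widget_is_loaded(const widget_t *w);
```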
|
||||
|
||||
## What To Optimize
|
||||
|
||||
Don't optimize anything if it's not in the critical path. Right now, the
|
||||
critical path seems to be AES, logging, and the network itself. Feel free to
|
||||
do your own profiling to determine otherwise.
|
||||
|
||||
## Log conventions
|
||||
|
||||
[FAQ - Log Levels](https://www.torproject.org/docs/faq#LogLevel)
|
||||
|
||||
No error or warning messages should be expected during normal OR or OP
|
||||
operation.
|
||||
|
||||
If a library function is currently called such that failure always means ERR,
|
||||
then the library function should log WARN and let the caller log ERR.
|
||||
|
||||
Every message of severity INFO or higher should either (A) be intelligible
|
||||
to end-users who don't know the Tor source; or (B) somehow inform the
|
||||
end-users that they aren't expected to understand the message (perhaps
|
||||
with a string like "internal error"). Option (A) is to be preferred to
|
||||
option (B).
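
For instance, assuming Tor's usual `log_warn()` macro and the `LD_GENERAL`
and `LD_BUG` log domains (the messages and the `fname`/`state` variables are
invented for illustration):

```c
/* (A) Intelligible to someone who has never read the Tor source: */
log_warn(LD_GENERAL, "Unable to open configuration file \"%s\".", fname);

/* (B) When the details can't be made user-friendly, say so explicitly: */
log_warn(LD_BUG, "Internal error: unexpected state %d. "
         "Please report this bug.", state);
```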
|
||||
|
||||
## Assertions In Tor
|
||||
|
||||
Assertions should be used for bug-detection only. Don't use assertions to
|
||||
detect bad user inputs, network errors, resource exhaustion, or similar
|
||||
issues.
|
||||
|
||||
Tor is always built with assertions enabled, so try to only use
|
||||
`tor_assert()` for cases where you are absolutely sure that crashing is the
|
||||
least bad option. Many bugs have been caused by use of `tor_assert()` when
|
||||
another kind of check would have been safer.
|
||||
|
||||
If you're writing an assertion to test for a bug that you _can_ recover from,
|
||||
use `tor_assert_nonfatal()` in place of `tor_assert()`. If you'd like to
|
||||
write a conditional that incorporates a nonfatal assertion, use the `BUG()`
|
||||
macro, as in:
|
||||
|
||||
```c
|
||||
if (BUG(ptr == NULL))
|
||||
return -1;
|
||||
```
|
||||
|
||||
## Allocator conventions
|
||||
|
||||
By convention, any tor type with a name like `abc_t` should be allocated
|
||||
by a function named `abc_new()`. This function should never return
|
||||
NULL.
|
||||
|
||||
Also, a type named `abc_t` should be freed by a function named `abc_free_()`.
|
||||
Don't call this `abc_free_()` function directly -- instead, wrap it in a
|
||||
macro called `abc_free()`, using the `FREE_AND_NULL` macro:
|
||||
|
||||
```c
|
||||
void abc_free_(abc_t *obj);
|
||||
#define abc_free(obj) FREE_AND_NULL(abc_t, abc_free_, (obj))
|
||||
```
|
||||
|
||||
This macro will free the underlying `abc_t` object, and will also set
|
||||
the object pointer to NULL.
|
||||
|
||||
You should define all `abc_free_()` functions to accept NULL inputs:
|
||||
|
||||
```c
|
||||
void
|
||||
abc_free_(abc_t *obj)
|
||||
{
|
||||
if (!obj)
|
||||
return;
|
||||
tor_free(obj->name);
|
||||
thing_free(obj->thing);
|
||||
tor_free(obj);
|
||||
}
|
||||
```
|
||||
|
||||
If you need a free function that takes a `void *` argument (for example,
|
||||
to use it as a function callback), define it with a name like
|
||||
`abc_free_void()`:
|
||||
|
||||
```c
|
||||
static void
|
||||
abc_free_void(void *obj)
|
||||
{
|
||||
abc_free_(obj);
|
||||
}
|
||||
```
|
||||
|
||||
When deallocating, don't say e.g. `if (x) tor_free(x)`. The convention is to
|
||||
have deallocators do nothing when a NULL pointer is passed.
|
||||
|
||||
## Doxygen comment conventions
|
||||
|
||||
Say what functions do as a series of one or more imperative sentences, as
|
||||
though you were telling somebody how to be the function. In other words, DO
|
||||
NOT say:
|
||||
|
||||
```c
|
||||
/** The strtol function parses a number.
|
||||
*
|
||||
* nptr -- the string to parse. It can include whitespace.
|
||||
* endptr -- a string pointer to hold the first thing that is not part
|
||||
* of the number, if present.
|
||||
* base -- the numeric base.
|
||||
* returns: the resulting number.
|
||||
*/
|
||||
long strtol(const char *nptr, char **endptr, int base);
|
||||
```
|
||||
|
||||
Instead, please DO say:
|
||||
|
||||
```c
|
||||
/** Parse a number in radix <b>base</b> from the string <b>nptr</b>,
|
||||
* and return the result. Skip all leading whitespace. If
|
||||
* <b>endptr</b> is not NULL, set *<b>endptr</b> to the first character
|
||||
* after the number parsed.
|
||||
**/
|
||||
long strtol(const char *nptr, char **endptr, int base);
|
||||
```
|
||||
|
||||
Doxygen comments are the contract in our abstraction-by-contract world: if
|
||||
the functions that call your function rely on it doing something, then your
|
||||
function should mention that it does that something in the documentation. If
|
||||
you rely on a function doing something beyond what is in its documentation,
|
||||
then you should watch out, or it might do something else later.
|
|
||||
# Fuzzing Tor
|
||||
|
||||
## The simple version (no fuzzing, only tests)
|
||||
|
||||
Check out fuzzing-corpora, and set TOR_FUZZ_CORPORA to point to the place
|
||||
where you checked it out.
|
||||
|
||||
To run the fuzzing test cases in a deterministic fashion, use:
|
||||
|
||||
```console
|
||||
$ make test-fuzz-corpora
|
||||
```
|
||||
|
||||
This won't actually fuzz Tor! It will just run all the fuzz binaries
|
||||
on our existing set of testcases for the fuzzer.
|
||||
|
||||
## Different kinds of fuzzing
|
||||
|
||||
Right now we support three different kinds of fuzzer.
|
||||
|
||||
First, there's American Fuzzy Lop (AFL), a fuzzer that works by forking
|
||||
a target binary and passing it lots of different inputs on stdin. It's the
|
||||
trickiest one to set up, so I'll be describing it more below.
|
||||
|
||||
Second, there's libFuzzer, a llvm-based fuzzer that you link in as a library,
|
||||
and it runs a target function over and over. To use this one, you'll need to
|
||||
have a reasonably recent clang and libfuzzer installed. At that point, you
|
||||
just build with --enable-expensive-hardening and --enable-libfuzzer. That
|
||||
will produce a set of binaries in src/test/fuzz/lf-fuzz-* . These programs
|
||||
take as input a series of directories full of fuzzing examples. For more
|
||||
information on libfuzzer, see https://llvm.org/docs/LibFuzzer.html
|
||||
|
||||
Third, there's Google's OSS-Fuzz infrastructure, which builds the fuzzers and
runs them continuously on Google's own systems. For more on this, see
https://github.com/google/oss-fuzz and the
|
||||
projects/tor subdirectory. You'll need to mess around with Docker a bit to
|
||||
test this one out; it's meant to run on Google's infrastructure.
|
||||
|
||||
In all cases, you'll need some starting examples to give the fuzzer when it
|
||||
starts out. There's a set in the "fuzzing-corpora" git repository. Try
|
||||
setting TOR_FUZZ_CORPORA to point to a checkout of that repository.
|
||||
|
||||
## Writing Tor fuzzers
|
||||
|
||||
A tor fuzzing harness should have:
|
||||
* a fuzz_init() function to set up any necessary global state.
|
||||
* a fuzz_main() function to receive input and pass it to a parser.
|
||||
* a fuzz_cleanup() function to clear global state.
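
A minimal skeleton of such a harness might look like the following sketch;
`my_parser_parse()` is a made-up stand-in for the code under test, and the
exact prototypes should be copied from the existing harnesses in
`src/test/fuzz/`:

```c
#include <stdint.h>
#include <stdlib.h>

/* Stand-in for whatever parser this harness exercises. */
static int my_parser_parse(const uint8_t *data, size_t len);

int
fuzz_init(void)
{
  /* Set up any global state the parser needs. */
  return 0;
}

int
fuzz_main(const uint8_t *data, size_t sz)
{
  /* Hand the input to the parser.  Invalid input must be rejected
   * cleanly; only genuine bugs should crash. */
  (void) my_parser_parse(data, sz);
  return 0;
}

int
fuzz_cleanup(void)
{
  /* Tear down whatever fuzz_init() created. */
  return 0;
}
```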
|
||||
|
||||
Most fuzzing frameworks will produce many invalid inputs - a tor fuzzing
|
||||
harness should reject invalid inputs without crashing or behaving badly.
|
||||
|
||||
But the fuzzing harness should crash if tor fails an assertion, triggers a
|
||||
bug, or accesses memory it shouldn't. This helps fuzzing frameworks detect
|
||||
"interesting" cases.
|
||||
|
||||
## Guided Fuzzing with AFL
|
||||
|
||||
There is no HTTPS, hash, or signature for American Fuzzy Lop's source code, so
|
||||
its integrity can't be verified. That said, you really shouldn't fuzz on a
|
||||
machine you care about, anyway.
|
||||
|
||||
To Build:
|
||||
Get AFL from http://lcamtuf.coredump.cx/afl/ and unpack it
|
||||
```console
|
||||
$ cd afl
|
||||
$ make
|
||||
$ cd ../tor
|
||||
$ PATH=$PATH:../afl/ CC="../afl/afl-gcc" ./configure --enable-expensive-hardening
|
||||
$ AFL_HARDEN=1 make clean fuzzers
|
||||
```
|
||||
|
||||
To Find The ASAN Memory Limit: (64-bit only)
|
||||
|
||||
On 64-bit platforms, afl needs to know how much memory ASAN uses,
|
||||
because ASAN tends to allocate a ridiculous amount of virtual memory,
|
||||
and then not actually use it.
|
||||
|
||||
Read afl/docs/notes_for_asan.txt for more details.
|
||||
|
||||
Download recidivm from https://jwilk.net/software/recidivm
|
||||
Download the signature
|
||||
Check the signature
|
||||
```console
|
||||
$ tar xvzf recidivm*.tar.gz
|
||||
$ cd recidivm*
|
||||
$ make
|
||||
$ /path/to/recidivm -v src/test/fuzz/fuzz-http
|
||||
```
|
||||
Use the final "ok" figure as the input to -m when calling afl-fuzz
|
||||
(Normally, recidivm would output a figure automatically, but in some cases,
|
||||
the fuzzing harness will hang when the memory limit is too small.)
|
||||
|
||||
You could also just say "none" instead of the memory limit below, if you
|
||||
don't care about memory limits.
|
||||
|
||||
|
||||
To Run:
|
||||
|
||||
```console
|
||||
$ mkdir -p src/test/fuzz/fuzz_http_findings
|
||||
$ ../afl/afl-fuzz -i ${TOR_FUZZ_CORPORA}/http -o src/test/fuzz/fuzz_http_findings -m <asan-memory-limit> -- src/test/fuzz/fuzz-http
|
||||
```
|
||||
|
||||
AFL has a multi-core mode, check the documentation for details.
|
||||
You might find the included fuzz-multi.sh script useful for this.
|
||||
|
||||
macOS (OS X) requires slightly more preparation, including:
|
||||
* using afl-clang (or afl-clang-fast from the llvm directory)
|
||||
* disabling external crash reporting (AFL will guide you through this step)
|
||||
|
||||
## Triaging Issues
|
||||
|
||||
Crashes are usually interesting, particularly if using AFL_HARDEN=1 and --enable-expensive-hardening. Sometimes crashes are due to bugs in the harness code.
|
||||
|
||||
Hangs might be interesting, but they might also be spurious machine slowdowns.
|
||||
Check if a hang is reproducible before reporting it. Sometimes, processing
|
||||
valid inputs may take a second or so, particularly with the fuzzer and
|
||||
sanitizers enabled.
|
||||
|
||||
To see what fuzz-http is doing with a test case, call it like this:
|
||||
|
||||
```console
|
||||
$ src/test/fuzz/fuzz-http --debug < /path/to/test.case
|
||||
```
|
||||
|
||||
(Logging is disabled while fuzzing to increase fuzzing speed.)
|
||||
|
||||
## Reporting Issues
|
||||
|
||||
Please report any issues discovered using the process in Tor's security issue
|
||||
policy:
|
||||
|
||||
https://gitlab.torproject.org/tpo/core/team/-/wikis/NetworkTeam/SecurityPolicy
|
|
||||
# Getting started in Tor development
|
||||
|
||||
Congratulations! You've found this file, and you're reading it! This
|
||||
means that you might be interested in getting started in developing Tor.
|
||||
|
||||
(_This guide is just about Tor itself--the small network program at the
|
||||
heart of the Tor network--and not about all the other programs in the
|
||||
whole Tor ecosystem._)
|
||||
|
||||
If you are looking for a more bare-bones, less user-friendly information
|
||||
dump of important information, you might like reading the
|
||||
[doxygen output](https://src-ref.docs.torproject.org/tor/index.html).
|
||||
You probably should skim some of the topic headings there before you write
|
||||
your first patch.
|
||||
|
||||
## Required background
|
||||
|
||||
First, I'm going to assume that you can build Tor from source, and that
|
||||
you know enough of the C language to read and write it. (See the README
|
||||
file that comes with the Tor source for more information on building it,
|
||||
and any high-quality guide to C for information on programming.)
|
||||
|
||||
I'm also going to assume that you know a little bit about how to use
|
||||
Git, or that you're able to follow one of the several excellent guides
|
||||
at [git-scm](https://git-scm.org) to learn.
|
||||
|
||||
Most Tor developers develop using some Unix-based system, such as GNU/Linux,
|
||||
BSD, or macOS. It's okay to develop on Windows if you want, but you're
|
||||
going to have a more difficult time.
|
||||
|
||||
## Getting your first patch into Tor
|
||||
|
||||
Once you've reached this point, here's what you need to know.
|
||||
|
||||
1. Get the source.
|
||||
|
||||
We keep our source under version control in Git. To get the latest
|
||||
version, run:
|
||||
|
||||
```console
|
||||
$ git clone https://gitlab.torproject.org/tpo/core/tor.git
|
||||
```
|
||||
|
||||
This will give you a checkout of the main branch. If you're
|
||||
going to fix a bug that appears in a stable version, check out the
|
||||
appropriate "maint" branch, as in:
|
||||
|
||||
```console
|
||||
$ git checkout maint-0.4.3
|
||||
```
|
||||
|
||||
2. Find your way around the source.
|
||||
|
||||
Our overall code structure is explained in our
|
||||
[source documentation](https://src-ref.docs.torproject.org/tor/index.html).
|
||||
|
||||
Find a part of the code that looks interesting to you, and start
|
||||
looking around it to see how it fits together!
|
||||
|
||||
We do some unusual things in our codebase. Our testing-related
|
||||
practices and kludges are explained in `doc/HACKING/WritingTests.md`.
|
||||
|
||||
If you see something that doesn't make sense, we love to get
|
||||
questions!
|
||||
|
||||
3. Find something cool to hack on.
|
||||
|
||||
You may already have a good idea of what you'd like to work on, or
|
||||
you might be looking for a way to contribute.
|
||||
|
||||
Many people have gotten started by looking for an area where they
|
||||
personally felt Tor was underperforming, and investigating ways to
|
||||
fix it. If you're looking for ideas, you can head to
|
||||
[gitlab](https://gitlab.torproject.org), our bug tracking tool, and look for
|
||||
tickets that have received the "First Contribution" label: these are ones
|
||||
that developers
|
||||
think would be pretty simple for a new person to work on. For a bigger
|
||||
challenge, you might want to look for tickets with the "Project Ideas"
|
||||
keyword: these are tickets that the developers think might be a
|
||||
good idea to build, but which we have no time to work on any time
|
||||
soon.
|
||||
|
||||
Or you might find another open ticket that piques your
|
||||
interest. It's all fine!
|
||||
|
||||
For your first patch, it is probably NOT a good idea to make
|
||||
something huge or invasive. In particular, you should probably
|
||||
avoid:
|
||||
|
||||
* Major changes spread across many parts of the codebase.
|
||||
* Major changes to programming practice or coding style.
|
||||
* Huge new features or protocol changes.
|
||||
|
||||
4. Meet the developers!
|
||||
|
||||
We discuss stuff on the tor-dev mailing list and on the `#tor-dev`
|
||||
IRC channel on OFTC. We're generally friendly and approachable,
|
||||
and we like to talk about how Tor fits together. If we have ideas
|
||||
about how something should be implemented, we'll be happy to share
|
||||
them.
|
||||
|
||||
We currently have a patch workshop at least once a week, where
|
||||
people share patches they've made and discuss how to make them
|
||||
better. The time might change in the future, but generally,
|
||||
there's no bad time to talk, and ask us about patch ideas.
|
||||
|
||||
5. Do you need to write a design proposal?
|
||||
|
||||
If your idea is very large, or it will require a change to Tor's
|
||||
protocols, there needs to be a written design proposal before it
|
||||
can be merged. (We use this process to manage changes in the
|
||||
protocols.) To write one, see the instructions at
|
||||
[the Tor proposal process](https://gitweb.torproject.org/torspec.git/plain/proposals/001-process.txt).
|
||||
If you'd like help writing a proposal, just ask! We're happy to
|
||||
help out with good ideas.
|
||||
|
||||
You might also like to look around the rest of that directory, to
|
||||
see more about open and past proposed changes to Tor's behavior.
|
||||
|
||||
6. Writing your patch
|
||||
|
||||
As you write your code, you'll probably want it to fit in with the
|
||||
standards of the rest of the Tor codebase so it will be easy for us
|
||||
to review and merge. You can learn our coding standards in
|
||||
the `doc/HACKING` directory.
|
||||
|
||||
If your patch is large and/or is divided into multiple logical
|
||||
components, remember to divide it into a series of Git commits. A
|
||||
series of small changes is much easier to review than one big lump.
|
||||
|
||||
7. Testing your patch
|
||||
|
||||
We prefer that all new or modified code have unit tests for it to
|
||||
ensure that it runs correctly. Also, all code should actually be
|
||||
_run_ by somebody, to make sure it works.
|
||||
|
||||
See `doc/HACKING/WritingTests.md` for more information on how we test things
|
||||
in Tor. If you'd like any help writing tests, just ask! We're
|
||||
glad to help out.
|
||||
|
||||
8. Submitting your patch
|
||||
|
||||
We review patches through tickets on our bugtracker at
|
||||
[gitlab](https://gitlab.torproject.org). You can either upload your patches there, or
|
||||
put them at a public git repository somewhere we can fetch them
|
||||
(like gitlab, github or bitbucket) and then paste a link on the appropriate
|
||||
ticket.
|
||||
|
||||
Once your patches are available, write a short explanation of what
|
||||
you've done on the ticket, and then change its status to
needs_review.
|
||||
|
||||
9. Review, Revision, and Merge
|
||||
|
||||
With any luck, somebody will review your patch soon! If not, you
|
||||
can ask on the IRC channel; sometimes we get really busy and take
|
||||
longer than we should. But don't let us slow you down: you're the
|
||||
one who's offering help here, and we should respect your time and
|
||||
contributions.
|
||||
|
||||
When your patch is reviewed, one of these things will happen:
|
||||
|
||||
* The reviewer will say "_looks good to me_" and your
|
||||
patch will get merged right into Tor. [Assuming we're not
|
||||
in the middle of a code-freeze window. If the codebase is
|
||||
frozen, your patch will go into the next release series.]
|
||||
|
||||
* OR the reviewer will say "_looks good, just needs some small
|
||||
changes!_" And then the reviewer will make those changes,
|
||||
and merge the modified patch into Tor.
|
||||
|
||||
* OR the reviewer will say "_Here are some questions and
|
||||
comments,_" followed by a bunch of stuff that the reviewer
|
||||
thinks should change in your code, or questions that the
|
||||
reviewer has.
|
||||
|
||||
At this point, you might want to make the requested changes
|
||||
yourself, and comment on the ticket once you have done
|
||||
so. Or if you disagree with any of the comments, you should
|
||||
say so! And if you won't have time to make some of the
|
||||
changes, you should say that too, so that other developers
|
||||
will be able to pick up the unfinished portion.
|
||||
|
||||
Congratulations! You have now written your first patch, and gotten
|
||||
it integrated into mainline Tor.
|
|
||||
# Useful tools
|
||||
|
||||
These aren't strictly necessary for hacking on Tor, but they can help track
|
||||
down bugs.
|
||||
|
||||
## Travis/Appveyor CI
|
||||
|
||||
It's CI.
|
||||
|
||||
Looks like this:
|
||||
* https://travis-ci.org/torproject/tor
|
||||
* https://ci.appveyor.com/project/torproject/tor
|
||||
|
||||
Travis builds and runs tests on Linux, and eventually macOS (#24629).
|
||||
Appveyor builds and runs tests on Windows (using MinGW).
|
||||
|
||||
Runs automatically on Pull Requests sent to torproject/tor. You can set it up
|
||||
for your fork to build commits outside of PRs too:
|
||||
|
||||
1. sign up for GitHub: https://github.com/join
|
||||
2. fork https://github.com/torproject/tor:
|
||||
https://help.github.com/articles/fork-a-repo/
|
||||
3. follow https://docs.travis-ci.com/user/getting-started/#To-get-started-with-Travis-CI.
|
||||
skip steps involving `.travis.yml` (we already have one).
|
||||
4. go to https://ci.appveyor.com/login , log in with github, and select
|
||||
"NEW PROJECT"
|
||||
|
||||
Builds should show up on the web at travis-ci.com and on IRC at #tor-ci on
|
||||
OFTC. If they don't, ask #tor-dev (also on OFTC).
|
||||
|
||||
## Jenkins
|
||||
|
||||
It's CI/builders. Looks like this: https://jenkins.torproject.org
|
||||
|
||||
Runs automatically on commits merged to git.torproject.org. We CI the
|
||||
main branch and all supported tor versions. We also build nightly debian
|
||||
packages from main.
|
||||
|
||||
Builds Linux and Windows cross-compilation. Runs Linux tests.
|
||||
|
||||
Builds should show up on the web at jenkins.torproject.org and on IRC at
|
||||
#tor-bots on OFTC. If they don't, ask #tor-dev (also on OFTC).
|
||||
|
||||
## Valgrind
|
||||
|
||||
```console
|
||||
$ valgrind --leak-check=yes --error-limit=no --show-reachable=yes src/app/tor
|
||||
```
|
||||
|
||||
(Note that if you get a zillion openssl warnings, you will also need to
|
||||
pass `--undef-value-errors=no` to valgrind, or rebuild your openssl
|
||||
with `-DPURIFY`.)
|
||||
|
||||
## Coverity
|
||||
|
||||
Nick regularly runs the coverity static analyzer on the Tor codebase.
|
||||
|
||||
The preprocessor define `__COVERITY__` is used to work around instances
|
||||
where coverity picks up behavior that we wish to permit.
|
||||
|
||||
## clang Static Analyzer
|
||||
|
||||
The clang static analyzer can be run on the Tor codebase using Xcode (WIP)
|
||||
or a command-line build.
|
||||
|
||||
The preprocessor define `__clang_analyzer__` is used to work around instances
|
||||
where clang picks up behavior that we wish to permit.
|
||||
|
||||
## clang Runtime Sanitizers
|
||||
|
||||
To build the Tor codebase with the clang Address and Undefined Behavior
|
||||
sanitizers, see the file `contrib/clang/sanitize_blacklist.txt`.
|
||||
|
||||
Preprocessor workarounds for instances where clang picks up behavior that
|
||||
we wish to permit are also documented in the blacklist file.
|
||||
|
||||
## Running lcov for unit test coverage
|
||||
|
||||
Lcov is a utility that generates pretty HTML reports of test code coverage.
|
||||
To generate such a report:
|
||||
|
||||
```console
|
||||
$ ./configure --enable-coverage
|
||||
$ make
|
||||
$ make coverage-html
|
||||
$ $BROWSER ./coverage_html/index.html
|
||||
```
|
||||
|
||||
This will run the tor unit test suite `./src/test/test` and generate the HTML
|
||||
coverage code report under the directory `./coverage_html/`. To change the
|
||||
output directory, use `make coverage-html HTML_COVER_DIR=./funky_new_cov_dir`.
|
||||
|
||||
Coverage diffs using lcov are not currently implemented, but are being
|
||||
investigated (as of July 2014).
|
||||
|
||||
## Running the unit tests
|
||||
|
||||
To quickly run all the tests distributed with Tor:
|
||||
|
||||
```console
|
||||
$ make check
|
||||
```
|
||||
|
||||
To run the fast unit tests only:
|
||||
|
||||
```console
|
||||
$ make test
|
||||
```
|
||||
|
||||
To selectively run just some tests (the following can be combined
|
||||
arbitrarily):
|
||||
|
||||
```console
|
||||
$ ./src/test/test <name_of_test> [<name of test 2>] ...
|
||||
$ ./src/test/test <prefix_of_name_of_test>.. [<prefix_of_name_of_test2>..] ...
|
||||
$ ./src/test/test :<name_of_excluded_test> [:<name_of_excluded_test2]...
|
||||
```
|
||||
|
||||
To run all tests, including those based on Stem or Chutney:
|
||||
|
||||
```console
|
||||
$ make test-full
|
||||
```
|
||||
|
||||
To run all tests, including those based on Stem or Chutney that require a
|
||||
working connection to the internet:
|
||||
|
||||
```console
|
||||
$ make test-full-online
|
||||
```
|
||||
|
||||
## Running gcov for unit test coverage
|
||||
|
||||
```console
|
||||
$ ./configure --enable-coverage
|
||||
$ make
|
||||
$ make check
|
||||
$ # or--- make test-full ? make test-full-online?
|
||||
$ mkdir coverage-output
|
||||
$ ./scripts/test/coverage coverage-output
|
||||
```
|
||||
|
||||
(On OSX, you'll need to start with `--enable-coverage CC=clang`.)
|
||||
|
||||
If that doesn't work:
|
||||
|
||||
* Try configuring Tor with `--disable-gcc-hardening`
|
||||
* You might need to run `make clean` after you run `./configure`.
|
||||
|
||||
Then, look at the .gcov files in `coverage-output`. '-' before a line means
|
||||
that the compiler generated no code for that line. '######' means that the
|
||||
line was never reached. Lines with numbers were called that number of times.
|
||||
|
||||
For more details about how to read gcov output, see the [Invoking
|
||||
gcov](https://gcc.gnu.org/onlinedocs/gcc/Invoking-Gcov.html) chapter
|
||||
of the GCC manual.
|
||||
|
||||
If you make changes to Tor and want to get another set of coverage results,
|
||||
you can run `make reset-gcov` to clear the intermediary gcov output.
|
||||
|
||||
If you have two different `coverage-output` directories, and you want to see
|
||||
a meaningful diff between them, you can run:
|
||||
|
||||
```console
|
||||
$ ./scripts/test/cov-diff coverage-output1 coverage-output2 | less
|
||||
```
|
||||
|
||||
In this diff, any lines that were visited at least once will have coverage "1",
|
||||
and line numbers are deleted. This lets you inspect what you (probably) really
|
||||
want to know: which untested lines were changed? Are there any new untested
|
||||
lines?
|
||||
|
||||
If you run ./scripts/test/cov-exclude, it marks excluded unreached
|
||||
lines with 'x', and excluded reached lines with '!!!'.
|
||||
|
||||
## Running integration tests
|
||||
|
||||
We have the beginnings of a set of scripts to run integration tests using
|
||||
Chutney. To try them, set CHUTNEY_PATH to your chutney source directory, and
|
||||
run `make test-network`.
|
||||
|
||||
We also have scripts to run integration tests using Stem. To try them, set
|
||||
`STEM_SOURCE_DIR` to your Stem source directory, and run `make test-stem`.
|
||||
|
||||
## Profiling Tor
|
||||
|
||||
Ongoing notes about Tor profiling can be found at
|
||||
https://pad.riseup.net/p/profiling-tor
|
||||
|
||||
## Profiling Tor with oprofile
|
||||
|
||||
The oprofile tool runs (on Linux only!) to tell you what functions Tor is
|
||||
spending its CPU time in, so we can identify performance bottlenecks.
|
||||
|
||||
Here are some basic instructions
|
||||
|
||||
- Build tor with debugging symbols (you probably already have, unless
|
||||
you messed with CFLAGS during the build process).
|
||||
- Build all the libraries you care about with debugging symbols
|
||||
(probably you only care about libssl, maybe zlib and Libevent).
|
||||
- Copy this tor to a new directory
|
||||
- Copy all the libraries it uses to that dir too (`ldd ./tor` will
|
||||
tell you)
|
||||
- Set LD_LIBRARY_PATH to include that dir. `ldd ./tor` should now
|
||||
show you it's using the libs in that dir
|
||||
- Run that tor
|
||||
- Reset oprofile's counters and start it
|
||||
* `opcontrol --reset; opcontrol --start`, if Nick remembers right.
|
||||
- After a while, have it dump the stats on tor and all the libs
|
||||
in that dir you created.
|
||||
* `opcontrol --dump;`
|
||||
* `opreport -l that_dir/*`
|
||||
- Profit
|
||||
|
||||
## Profiling Tor with perf
|
||||
|
||||
This works with a running Tor, and requires root.
|
||||
|
||||
1. Decide how long you want to profile for. Start with (say) 30 seconds. If that
|
||||
works, try again with longer times.
|
||||
|
||||
2. Find the PID of your running tor process.
|
||||
|
||||
3. Run `perf record --call-graph dwarf -p <PID> sleep <SECONDS>`
|
||||
|
||||
(You may need to do this as root.)
|
||||
|
||||
You might need to add `-e cpu-clock` as an option to the perf record line
|
||||
above, if you are on an older CPU without access to hardware profiling
|
||||
events, or in a VM, or something.
|
||||
|
||||
4. Now you have a perf.data file. Have a look at it with `perf report
|
||||
--no-children --sort symbol,dso` or `perf report --no-children --sort
|
||||
symbol,dso --stdio --header`. How does it look?
|
||||
|
||||
5a. Once you have a nice big perf.data file, you can compress it, encrypt it,
|
||||
and send it to your favorite Tor developers.
|
||||
|
||||
5b. Or maybe you'd rather not send a nice big perf.data file. Who knows what's
|
||||
in that!? It's kinda scary. To generate a less scary file, you can use `perf
|
||||
report -g > <FILENAME>.out`. Then you can compress that and put it somewhere
|
||||
public.
|
||||
|
||||
## Profiling Tor with gperftools aka Google-performance-tools
|
||||
|
||||
This should work on nearly any unixy system. It doesn't seem to be compatible
|
||||
with RunAsDaemon though.
|
||||
|
||||
Beforehand, install google-perftools.
|
||||
|
||||
1. You need to rebuild Tor, hack the linking steps to add `-lprofiler` to the
|
||||
libs. You can do this by adding `LIBS=-lprofiler` when you call `./configure`.
|
||||
|
||||
Now you can run Tor with profiling enabled, and use the pprof utility to look at
|
||||
performance! See the gperftools manual for more info, but basically:
|
||||
|
||||
2. Run `env CPUPROFILE=/tmp/profile src/app/tor -f <path/torrc>`. The profile file
|
||||
is not written to until Tor finishes execution.
|
||||
|
||||
3. Run `pprof src/app/tor /tmp/profile` to start the REPL.
|
||||
|
||||
## Generating and analyzing a callgraph
|
||||
|
||||
0. Build Tor on linux or mac, ideally with -O0 or -fno-inline.
|
||||
|
||||
1. Clone 'https://git.torproject.org/user/nickm/calltool.git/' .
|
||||
Follow the README in that repository.
|
||||
|
||||
Note that currently the callgraph generator can't detect calls that pass
|
||||
through function pointers.
|
||||
|
||||
## Getting emacs to edit Tor source properly
|
||||
|
||||
Nick likes to put the following snippet in his .emacs file:
|
||||
|
||||
|
||||
(add-hook 'c-mode-hook
|
||||
(lambda ()
|
||||
(font-lock-mode 1)
|
||||
(set-variable 'show-trailing-whitespace t)
|
||||
|
||||
(let ((fname (expand-file-name (buffer-file-name))))
|
||||
(cond
|
||||
((string-match "^/home/nickm/src/libevent" fname)
|
||||
(set-variable 'indent-tabs-mode t)
|
||||
(set-variable 'c-basic-offset 4)
|
||||
(set-variable 'tab-width 4))
|
||||
((string-match "^/home/nickm/src/tor" fname)
|
||||
(set-variable 'indent-tabs-mode nil)
|
||||
(set-variable 'c-basic-offset 2))
|
||||
((string-match "^/home/nickm/src/openssl" fname)
|
||||
(set-variable 'indent-tabs-mode t)
|
||||
(set-variable 'c-basic-offset 8)
|
||||
(set-variable 'tab-width 8))
|
||||
))))
|
||||
|
||||
|
||||
You'll note that it defaults to showing all trailing whitespace. The `cond`
|
||||
test detects whether the file is one of a few C free software projects that I
|
||||
often edit, and sets up the indentation level and tab preferences to match
|
||||
what they want.
|
||||
|
||||
If you want to try this out, you'll need to change the filename regex
|
||||
patterns to match where you keep your Tor files.
|
||||
|
||||
If you use emacs for editing Tor and nothing else, you could always just say:
|
||||
|
||||
|
||||
(add-hook 'c-mode-hook
|
||||
(lambda ()
|
||||
(font-lock-mode 1)
|
||||
(set-variable 'show-trailing-whitespace t)
|
||||
(set-variable 'indent-tabs-mode nil)
|
||||
(set-variable 'c-basic-offset 2)))
|
||||
|
||||
|
||||
There is probably a better way to do this. No, we are probably not going
|
||||
to clutter the files with emacs stuff.
|
||||
|
||||
## Building a tag file (code index)
|
||||
|
||||
Many functions in tor use `MOCK_IMPL` wrappers for unit tests. Your
|
||||
tag-building program must be told how to handle this syntax.
|
||||
|
||||
If you're using emacs, you can generate an emacs-compatible tag file using
|
||||
`make tags`. This will run your system's `etags`. Tor's build system assumes
|
||||
that you're using the emacs-specific version of `etags` (bundled under the
|
||||
`xemacs21-bin` package on Debian). This is incompatible with other versions of
|
||||
`etags` such as the version provided by Exuberant Ctags.
|
||||
|
||||
If you're using vim or emacs, you can also use Universal Ctags to build a tag
|
||||
file using the syntax:
|
||||
|
||||
```console
|
||||
$ ctags -R -D 'MOCK_IMPL(r,h,a)=r h a' .
|
||||
```
|
||||
|
||||
If you're using an older version of Universal Ctags, you can use the following
|
||||
instead:
|
||||
|
||||
```console
|
||||
ctags -R --mline-regex-c='/MOCK_IMPL\([^,]+,\W*([a-zA-Z0-9_]+)\W*,/\1/f/{mgroup=1}' .
|
||||
```
|
||||
|
||||
A vim-compatible tag file will be generated by default. If you use emacs, add
|
||||
the `-e` flag to generate an emacs-compatible tag file.
|
||||
|
||||
## Doxygen
|
||||
|
||||
We use the 'doxygen' utility to generate documentation from our
|
||||
source code. Here's how to use it:
|
||||
|
||||
1. Begin every file that should be documented with
|
||||
|
||||
```
|
||||
/**
|
||||
* \file filename.c
|
||||
* \brief Short description of the file.
|
||||
*/
|
||||
```
|
||||
|
||||
(Doxygen will recognize any comment beginning with /** as special.)
|
||||
|
||||
2. Before any function, structure, #define, or variable you want to
|
||||
document, add a comment of the form:
|
||||
|
||||
```
|
||||
/** Describe the function's actions in imperative sentences.
|
||||
*
|
||||
* Use blank lines for paragraph breaks
|
||||
* - and
|
||||
* - hyphens
|
||||
* - for
|
||||
* - lists.
|
||||
*
|
||||
* Write <b>argument_names</b> in boldface.
|
||||
*
|
||||
* \code
|
||||
* place_example_code();
|
||||
* between_code_and_endcode_commands();
|
||||
* \endcode
|
||||
*/
|
||||
```
|
||||
|
||||
3. Make sure to escape the characters `<`, `>`, `\`, `%` and `#` as `\<`,
|
||||
`\>`, `\\`, `\%` and `\#`.
|
||||
|
||||
4. To document structure members, you can use two forms:
|
||||
|
||||
```c
|
||||
struct foo {
|
||||
/** You can put the comment before an element; */
|
||||
int a;
|
||||
int b; /**< Or use the less-than symbol to put the comment
|
||||
* after the element. */
|
||||
};
|
||||
```
|
||||
|
||||
5. To generate documentation from the Tor source code, type:
|
||||
|
||||
```console
|
||||
$ doxygen -g
|
||||
```
|
||||
|
||||
to generate a file called `Doxyfile`. Edit that file and run
|
||||
`doxygen` to generate the API documentation.
|
||||
|
||||
6. See the Doxygen manual for more information; this summary just
|
||||
scratches the surface.
|
||||
|
||||
## Style and best-practices checking
|
||||
|
||||
We use scripts to check for various problems in the formatting and style
|
||||
of our source code. The "check-spaces" test detects a bunch of violations
|
||||
of our coding style on the local level. The "check-best-practices" test
|
||||
looks for violations of some of our complexity guidelines.
|
||||
|
||||
You can tell the tool about exceptions to the complexity guidelines via its
|
||||
exceptions file (scripts/maint/practracker/exceptions.txt). But before you
|
||||
do this, consider whether you shouldn't fix the underlying problem. Maybe
|
||||
that file really _is_ too big. Maybe that function really _is_ doing too
|
||||
much. (On the other hand, for stable release series, it is sometimes better
|
||||
to leave things unrefactored.)
|
|
||||
# How to review a patch
|
||||
|
||||
Some folks have said that they'd like to review patches more often, but they
|
||||
don't know how.
|
||||
|
||||
So, here are a bunch of things to check for when reviewing a patch!
|
||||
|
||||
Note that if you can't do every one of these, that doesn't mean you can't do
|
||||
a good review! Just make it clear what you checked for and what you didn't.
|
||||
|
||||
## Top-level smell-checks
|
||||
|
||||
(Difficulty: easy)
|
||||
|
||||
- Does it compile with `--enable-fatal-warnings`?
|
||||
|
||||
- Does `make check-spaces` pass?
|
||||
|
||||
- Does `make check-changes` pass?
|
||||
|
||||
- Does it have a reasonable amount of tests? Do they pass? Do they leak
|
||||
memory?
|
||||
|
||||
- Do all the new functions, global variables, types, and structure members have
|
||||
documentation?
|
||||
|
||||
- Do all the functions, global variables, types, and structure members with
|
||||
modified behavior have modified documentation?
|
||||
|
||||
- Do all the new torrc options have documentation?
|
||||
|
||||
- If this changes Tor's behavior on the wire, is there a design proposal?
|
||||
|
||||
- If this changes anything in the code, is there a "changes" file?
|
||||
|
||||
|
||||
## Let's look at the code!
|
||||
|
||||
- Does the code conform to `CodingStandards.md`?
|
||||
|
||||
- Does the code leak memory?
|
||||
|
||||
- If two or more pointers ever point to the same object, is it clear which
|
||||
pointer "owns" the object?
|
||||
|
||||
- Are all allocated resources freed?
|
||||
|
||||
- Are all pointers that should be const, const?
|
||||
|
||||
- Are `#defines` used for 'magic' numbers?
|
||||
|
||||
- Can you understand what the code is trying to do?
|
||||
|
||||
- Can you convince yourself that the code really does that?
|
||||
|
||||
- Is there duplicated code that could be turned into a function?
|
||||
|
||||
|
||||
## Let's look at the documentation!
|
||||
|
||||
- Does the documentation conform to `CodingStandards.md`?
|
||||
|
||||
- Does it make sense?
|
||||
|
||||
- Can you predict what the function will do from its documentation?
|
||||
|
||||
|
||||
## Let's think about security!
|
||||
|
||||
- If there are any arrays, buffers, are you 100% sure that they cannot
|
||||
overflow?
|
||||
|
||||
- If there is any integer math, can it overflow or underflow?
|
||||
|
||||
- If there are any allocations, are you sure there are corresponding
|
||||
deallocations?
|
||||
|
||||
- Is there a safer pattern that could be used in any case?
|
||||
|
||||
- Have they used one of the Forbidden Functions?
|
||||
|
||||
(Also see your favorite secure C programming guides.)
|
|
||||
# Maintaining Tor
|
||||
|
||||
This document details the duties and processes on maintaining the Tor code
|
||||
base.
|
||||
|
||||
The first section describes who the current Tor maintainer is and what their
responsibilities are. Tor has a single main maintainer, but it also has many
committers and subsystem maintainers.
|
||||
|
||||
The second section describes how the **alpha and main** branches are
|
||||
maintained and by whom.
|
||||
|
||||
Finally, the last section describes how the **stable** branches are maintained
|
||||
and by whom.
|
||||
|
||||
This document does not cover how Tor is released, please see
|
||||
[ReleasingTor.md](ReleasingTor.md) for that information.
|
||||
|
||||
## Tor Maintainer
|
||||
|
||||
The current maintainer is Nick Mathewson <nickm@torproject.org>.
|
||||
|
||||
The maintainer takes final decisions in terms of engineering, architecture and
|
||||
protocol design. Releasing Tor falls under their responsibility.
|
||||
|
||||
## Alpha and Main Branches
|
||||
|
||||
The Tor repository always has a **main** branch, which contains the upstream
ongoing development.
|
||||
|
||||
It may also contain a branch for a released, feature-frozen version, which is
called the **alpha** branch. The git tag and version number are always
suffixed with `-alpha[-dev]`. For example: `tor-0.3.5.0-alpha-dev` or
|
||||
`tor-0.3.5.3-alpha`.
|
||||
|
||||
Tor is separated into subsystems, and some of those are maintained by
developers other than the main maintainer. Those people have commit access to the
|
||||
code base but only commit (in most cases) into the subsystem they maintain.
|
||||
|
||||
Upstream merges are restricted to the alpha and main branches. Subsystem
|
||||
maintainers should never push a patch into a stable branch which is the
|
||||
responsibility of the [stable branch maintainer](#stable-branches).
|
||||
|
||||
### Who
|
||||
|
||||
In alphabetical order, the following people have upstream commit access and
|
||||
maintain the following subsystems:
|
||||
|
||||
- David Goulet <dgoulet@torproject.org>
|
||||
* Onion Service (including Shared Random).
|
||||
***keywords:*** *[tor-hs]*
|
||||
* Channels, Circuitmux, Connection, Scheduler.
|
||||
***keywords:*** *[tor-chan, tor-cmux, tor-sched, tor-conn]*
|
||||
* Cell Logic (Handling/Parsing).
|
||||
***keywords:*** *[tor-cell]*
|
||||
* Threading backend.
|
||||
***keywords:*** *[tor-thread]*
|
||||
|
||||
- George Kadianakis <asn@torproject.org>
|
||||
* Onion Service (including Shared Random).
|
||||
***keywords:*** *[tor-hs]*
|
||||
* Guard.
|
||||
***keywords:*** *[tor-guard]*
|
||||
* Pluggable Transport (excluding Bridge networking).
|
||||
***keywords:*** *[tor-pt]*
|
||||
|
||||
### Tasks
|
||||
|
||||
These are the tasks of a subsystem maintainer:
|
||||
|
||||
1. Regularly go over `merge_ready` tickets relevant to the related subsystem
|
||||
and for the current alpha or development (main branch) Milestone.
|
||||
|
||||
2. A subsystem maintainer is expected to contribute to any design changes
|
||||
   (including proposals) or large patch sets affecting the subsystem.
|
||||
|
||||
3. Leave their ego at the door. Mistakes will be made, but they have to be
   taken care of seriously. Learn and move on quickly.
|
||||
|
||||
### Merging Policy
|
||||
|
||||
These are a few important items to follow when merging code upstream:
|
||||
|
||||
1. To merge code upstream, the patch must have passed our CI (currently
|
||||
   github.com/torproject), have a corresponding ticket, and have been reviewed
   by **at least** one person who is not the original coder.
|
||||
|
||||
Example A: If Alice writes a patch then Bob, a Tor network team member,
|
||||
reviews it and flags it `merge_ready`. Then, the maintainer is required
|
||||
   to look at the patch and make a decision.
|
||||
|
||||
Example B: If the maintainer writes a patch then Bob, a Tor network
|
||||
team member, reviews it and flags it `merge_ready`, then the maintainer
|
||||
can merge the code upstream.
|
||||
|
||||
2. The maintainer makes sure the commit message describes what was fixed
   and, if applicable, how it was fixed. It should also always refer to
|
||||
the ticket number.
|
||||
|
||||
3. Trivial patches such as comment changes, documentation, syntax issues, or
|
||||
typos can be merged without a ticket or reviewers.
|
||||
|
||||
4. Tor uses the "merge forward" method, that is, if a patch applies to the
|
||||
alpha branch, it has to be merged there first and then merged forward
|
||||
into main.
|
||||
|
||||
5. The maintainer should always consult with the network team about any doubts,
   misunderstandings, or unknowns in a patch. The final word will always go to the
|
||||
main Tor maintainer.
|
||||
|
||||
## Stable Branches
|
||||
|
||||
(Currently being drafted and reviewed by the network team.)
|
|
||||
# Modules in Tor
|
||||
|
||||
This document describes the build system and coding standards when writing a
|
||||
module in Tor.
|
||||
|
||||
## What is a module?
|
||||
|
||||
In the context of the tor code base, a module is a subsystem that we can
|
||||
selectively enable or disable, at `configure` time.
|
||||
|
||||
Currently, tor has these modules:
|
||||
|
||||
- Relay subsystem (relay)
|
||||
- Directory cache system (dircache).
|
||||
- Directory Authority subsystem (dirauth)
|
||||
|
||||
The dirauth code is located in its own directory in `src/feature/dirauth/`.
|
||||
|
||||
The relay code is located in a directory named `src/*/*relay`, which is
|
||||
being progressively refactored and disabled.
|
||||
|
||||
The dircache code is located in `src/*/*dircache`. Right now, it is
|
||||
disabled if and only if the relay module is disabled. (We are treating
|
||||
them as separate modules because they are logically independent, not
|
||||
because you would actually want to run one without the other.)
|
||||
|
||||
To disable a module, pass `--disable-module-{dirauth,relay}` at configure
|
||||
time. All modules are currently enabled by default.
|
||||
|
||||
## Build System
|
||||
|
||||
The changes to the build system are pretty straightforward.
|
||||
|
||||
1. Locate in the `configure.ac` file this define: `m4_define(MODULES`. It
|
||||
contains a list (white-space separated) of the module in tor. Add yours to
|
||||
the list.
|
||||
|
||||
2. Use the `AC_ARG_ENABLE([module-relay]` template for your new module. We
|
||||
use the "disable module" approach instead of enabling them one by one. So,
|
||||
by default, tor will build all the modules.
|
||||
|
||||
This will define the `HAVE_MODULE_<name>` statement which can be used in
|
||||
the C code to conditionally compile things for your module. And the
|
||||
`BUILD_MODULE_<name>` is also defined for automake files (e.g: include.am).
|
||||
|
||||
3. In the `src/core/include.am` file, locate the `MODULE_RELAY_SOURCES`
|
||||
value. You need to create your own `_SOURCES` variable for your module
|
||||
   and then conditionally add it to `LIBTOR_A_SOURCES` if you should
|
||||
build the module.
|
||||
|
||||
It is then **very** important to add your SOURCES variable to
|
||||
`src_or_libtor_testing_a_SOURCES` so the tests can build it.
|
||||
|
||||
Finally, your module will automatically be included in the
|
||||
`TOR_MODULES_ALL_ENABLED` variable which is used to build the unit tests.
|
||||
They always build everything in order to test everything.
|
||||
|
||||
## Coding
|
||||
|
||||
As mentioned above, a module should be isolated in its own directories,
|
||||
suffixed with the name of the module, in `src/*/`.
|
||||
|
||||
There are a couple of "rules" you want to follow:
|
||||
|
||||
* Minimize the number of entry points into your module as much as you can.
  Fewer is always better, but of course that doesn't work out for every use
  case. However, it is a good thing to keep in mind.
|
||||
|
||||
* Do **not** use the `HAVE_MODULE_<name>` define outside of the module code
|
||||
base. Every entry point should have a second definition if the module is
|
||||
disabled. For instance:
|
||||
|
||||
```c
|
||||
#ifdef HAVE_MODULE_DIRAUTH
|
||||
|
||||
int sr_init(int save_to_disk);
|
||||
|
||||
#else /* HAVE_MODULE_DIRAUTH */
|
||||
|
||||
static inline int
|
||||
sr_init(int save_to_disk)
|
||||
{
|
||||
(void) save_to_disk;
|
||||
return 0;
|
||||
}
|
||||
|
||||
#endif /* HAVE_MODULE_DIRAUTH */
|
||||
|
||||
```
|
||||
|
||||
The main reason for this approach is to avoid having conditional code
|
||||
everywhere in the code base. It should be centralized as much as possible
|
||||
which helps maintainability but also avoids conditional spaghetti code
|
||||
making the code much more difficult to follow/understand.
|
||||
|
||||
* It is possible that you end up with code that needs to be used by the rest
|
||||
of the code base but is still part of your module. As a good example, if
|
||||
you look at `src/feature/shared_random_client.c`: it contains code needed
|
||||
  by the hidden service subsystem but mainly related to the shared random
  subsystem, which is very specific to the dirauth module.
|
||||
|
||||
This is fine but try to keep it as lean as possible and never use the same
|
||||
filename as the one in the module. For example, this is a bad idea and
|
||||
should never be done:
|
||||
|
||||
- `src/feature/dirclient/shared_random.c`
|
||||
- `src/feature/dirauth/shared_random.c`
|
||||
|
||||
* When you include headers from the module, **always** use the full module
|
||||
path in your statement. Example:
|
||||
|
||||
```c
|
||||
#include "feature/dirauth/dirvote.h"
|
||||
```
|
||||
|
||||
The main reason is that we do **not** add the module include path by default
|
||||
so it needs to be specified. But also, it helps our human brain understand
|
||||
which part comes from a module or not.
|
||||
|
||||
Even **in** the module itself, use the full include path like above.
|
|
||||
# README.1st
|
||||
|
||||
## In this directory
|
||||
|
||||
This directory has helpful information about what you need to know to
|
||||
hack on Tor!
|
||||
|
||||
First, read `GettingStarted.md` to learn how to get a start in Tor
|
||||
development.
|
||||
|
||||
If you've decided to write a patch, `CodingStandards.md` will give you a bunch
|
||||
of information about how we structure our code.
|
||||
|
||||
It's important to get the code right! Reading `WritingTests.md` will
|
||||
tell you how to write and run tests in the Tor codebase.
|
||||
|
||||
There are a bunch of other programs we use to help maintain and
|
||||
develop the codebase: `HelpfulTools.md` can tell you how to use them
|
||||
with Tor.
|
||||
|
||||
If it's your job to put out Tor releases, see `ReleasingTor.md` so
|
||||
that you don't miss any steps!
|
||||
|
||||
## Additional Information
|
||||
|
||||
For full information on how Tor is supposed to work, look at the files in
|
||||
[Tor specification](https://gitlab.torproject.org/tpo/core/torspec).
|
||||
|
||||
For an explanation of how to change Tor's design to work differently, look at
|
||||
[the Tor proposal process](https://gitlab.torproject.org/tpo/core/torspec/-/blob/main/proposals/001-process.txt).
|
||||
|
||||
For the latest version of the code, get a copy of git, and
|
||||
|
||||
```console
|
||||
$ git clone https://gitlab.torproject.org/tpo/core/tor.git
|
||||
```
|
||||
|
||||
For a copy of Tor's original design paper, see
|
||||
[here](https://spec.torproject.org/tor-design). Note that Tor has changed in
|
||||
many ways since 2004.
|
||||
|
||||
For a large collection of security papers, many of which are related to Tor,
|
||||
see [Anonbib's Selected Papers in Anonymity](https://www.freehaven.net/anonbib/).
|
||||
|
||||
## Stay in touch
|
||||
|
||||
We talk about Tor on the `tor-talk` mailing list. Design proposals and
|
||||
discussion belong on the `tor-dev` mailing list. We hang around on
|
||||
irc.oftc.net, with general discussion happening on `#tor` and development
|
||||
happening on `#tor-dev`.
|
||||
|
||||
The other files in this `HACKING` directory may also be useful as you
|
||||
get started working with Tor.
|
||||
|
||||
Happy hacking!
|
|
||||
# Release Series Lifecycle
|
||||
|
||||
|
||||
## End Of Life On An Old Release Series
|
||||
|
||||
Here are the steps that the maintainer should take when an old Tor release
|
||||
series reaches End of Life.
|
||||
|
||||
Note that they are _only_ for an entire series that has reached its planned
|
||||
EOL: they do not apply to security-related deprecations of individual
|
||||
patch versions.
|
||||
|
||||
|
||||
### 1. Preliminaries
|
||||
|
||||
1. A few months before End of Life:
|
||||
Write a deprecation announcement.
|
||||
Send the announcement out with every new release announcement.
|
||||
|
||||
2. A month before End of Life:
|
||||
Send the announcement to tor-announce, tor-talk, tor-relays, and the
|
||||
packagers.
|
||||
|
||||
|
||||
### 2. On The Day
|
||||
|
||||
1. Open tickets to remove the release from:
|
||||
- the jenkins builds
|
||||
- tor's Travis CI cron jobs
|
||||
- chutney's Travis CI tests
|
||||
- sbws' Travis CI tests
|
||||
- stem's Travis CI tests (but see
|
||||
https://github.com/torproject/stem/issues/51)
|
||||
- tor's scripts/git/gist-list-tor-branches.sh script
|
||||
|
||||
2. Close the milestone in Trac. To do this, go to Trac, log in,
|
||||
select "Admin" near the top of the screen, then select "Milestones" from
|
||||
the menu on the left. Click on the milestone for this version, and
|
||||
select the "Completed" checkbox. By convention, we select the date as
|
||||
the End of Life date.
|
||||
|
||||
3. Replace NNN-backport with NNN-unreached-backport in all open trac tickets.
|
||||
|
||||
4. If there are any remaining tickets in the milestone:
|
||||
- merge_ready tickets are for backports:
|
||||
- if there are no supported releases for the backport, close the ticket
|
||||
- if there is an earlier (LTS) release for the backport, move the ticket
|
||||
to that release
|
||||
- other tickets should be closed (if we won't fix them) or moved to a
|
||||
supported release (if we will fix them)
|
||||
|
||||
5. Mail the end of life announcement to tor-announce, the packagers list,
|
||||
and tor-relays. The current list of packagers is in ReleasingTor.md.
|
||||
|
||||
6. Ask at least two of weasel/arma/Sebastian to remove the old version
|
||||
number from their approved versions list.
|
||||
|
||||
7. Update the CoreTorReleases wiki page.
|
||||
|
||||
8. Open a ticket (if there is not one already) for authorities to
|
||||
start rejecting relays that are running that release series.
|
||||
This ticket should be targeted for at least a month or two
|
||||
after the series is officially EOL, unless there is an important
|
||||
reason to un-list relays early.
|
||||
|
||||
9. (LTS end-of-life only) Open a ticket (if appropriate) for updates to the
|
||||
set of required and recommended subprotocol versions. (For the process
|
||||
here, see proposal 303.)
|
||||
|
||||
10. (LTS end-of-life only) Open a ticket to remove no-longer-needed
|
||||
consensus methods. (For the process here, see proposal 290.)
|
||||
|
||||
11. (All EOL) Open a ticket to grep for obsolete series names (e.g., "0.2.9"
|
||||
and "029") in tor, chutney, sbws, fallback-scripts, and so on. These
|
||||
should be updated or removed.
|
||||
|
||||
12. Finally, make sure this document is up to date with our latest
|
||||
process.
|
||||
|
||||
## Starting A New Release Series
|
||||
|
||||
Here are the steps that the maintainer should take to start new maint and
|
||||
release branches for a stable release.
|
||||
|
||||
Note that they are _only_ for an entire series, when it first becomes stable:
|
||||
they do not apply to security-related patch release versions.
|
||||
|
||||
(Ideally, do this immediately after a release.)
|
||||
|
||||
1. Start a new maint-x.y.z branch based on main, and a new
|
||||
release-x.y.z branch based on main. They should have the same
|
||||
starting point.
|
||||
|
||||
Push both of these branches to the canonical git repository.
|
||||
|
||||
2. In the main branch, change the version to "0.x.y.0-alpha-dev". Run the
|
||||
update_versions.py script, and commit this version bump.
|
||||
|
||||
3. Tag the version bump with "tor-0.x.y.0-alpha-dev". Push the tag
|
||||
and main branch.
|
||||
|
||||
4. Open tickets for connecting the new branches to various other
|
||||
places. See section 2 above for a list of affected locations.
|
||||
|
||||
5. Stop running practracker on maintenance and release branches:
|
||||
* Remove "check-best-practices" from the check-local Makefile
|
||||
target in the maint-x.y.z branch only.
|
||||
* Delete the file scripts/maint/practracker/.enable_practracker_in_hooks
|
||||
in the maint-x.y.z branch only.
|
||||
* Merge to release-x.y.z, but do not forward-port to the main branch.
|
||||
|
||||
6. Finally, make sure this document is up to date with our latest
|
||||
process.
|
|
||||
# How to Release Tor
|
||||
|
||||
Here are the steps that the maintainer should take when putting out a
|
||||
new Tor release. It is split in 3 stages and coupled with our Tor CI Release
|
||||
pipeline.
|
||||
|
||||
Before we begin, first rule is to make sure:
|
||||
|
||||
- Our CIs (*nix and Windows) pass for each version to release
|
||||
- Coverity has no new alerts
|
||||
|
||||
## 0. Security Release
|
||||
|
||||
To start with, if you are doing a security release, this must be done a few days
|
||||
prior to the release:
|
||||
|
||||
1. If this is going to be an important security release, give the packagers
|
||||
advance warning, via `tor-packagers@lists.torproject.org`.
|
||||
|
||||
|
||||
## 1. Preliminaries
|
||||
|
||||
The following must be done at least **2 days** prior to the release:
|
||||
|
||||
1. Add the version(s) in the dirauth-conf git repository as the
|
||||
RecommendedVersion and RequiredVersion so they can be approved by the
|
||||
authorities and be in the consensus before the release.
|
||||
|
||||
2. Send a pre-release announcement to `tor-project@lists.torproject.org` in
|
||||
order to inform every team in Tor of the upcoming release. This is so
|
||||
we can avoid creating release surprises and sync with other teams.
|
||||
|
||||
3. Ask the network-team to review the `changes/` files in all versions we
|
||||
are about to release. This step is encouraged but not mandatory.
|
||||
|
||||
|
||||
## 2. Tarballs
|
||||
|
||||
To build the tarballs to release, we need to launch the CI release pipeline:
|
||||
|
||||
https://gitlab.torproject.org/tpo/core/tor-ci-release
|
||||
|
||||
The `versions.yml` needs to be modified with the Tor versions you want to
|
||||
release. Once done, git commit and push to trigger the release pipeline.
|
||||
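For instance, a sketch of triggering the pipeline (the clone URL form, branch
name, and version are illustrative):

```console
$ git clone https://gitlab.torproject.org/tpo/core/tor-ci-release.git
$ cd tor-ci-release
$ $EDITOR versions.yml    # list the tor version(s) to release
$ git commit -am "Release 0.4.7.x"
$ git push
```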
|
||||
The first two stages (Preliminary and Patches) will be run automatically. The
|
||||
Build stage needs to be triggered manually once all generated patches have
|
||||
been merged upstream.
|
||||
|
||||
1. Download the generated patches from the `Patches` stage.
|
||||
|
||||
Apply these patches to the `main` or `release` branch as appropriate.
|
||||
(Version bumps apply to `maint`; anything touching the changelog should
|
||||
apply only to `main` or `release`.)
|
||||
|
||||
The version bump lands on the `maint` branches, so when merging it forward
use `git merge -s ours`. For instance, to merge the version change of
`maint-0.4.5` into `maint-0.4.6`, run `git merge -s ours maint-0.4.5` on
`maint-0.4.6`, and then proceed with the usual git-merge-forward.
|
||||
|
||||
2. For the ChangeLog and ReleaseNotes, you need to write a blurb at the top
explaining the release a bit.
|
||||
|
||||
3. Review, modify if needed, and merge them upstream.
|
||||
|
||||
4. Manually trigger the `maintained` job in the `Build` stage so the CI can
|
||||
build the tarballs without errors.
|
||||
|
||||
Once this is done, each selected developer needs to build the tarballs in a
|
||||
reproducible way using:
|
||||
|
||||
https://gitlab.torproject.org/tpo/core/tor-ci-reproducible
|
||||
|
||||
Steps are:
|
||||
|
||||
1. Run `./build.sh` which will download everything you need, including the
|
||||
latest tarballs from the release CI, and auto-commit the signatures if
|
||||
the checksums match. You will need to confirm the commits.
|
||||
|
||||
2. If all is good, `git push origin main` your signatures.
|
||||
|
||||
Once all signatures from all selected developers have been committed:
|
||||
|
||||
1. Manually trigger the `signature` job in the `Post-process` stage of the
|
||||
CI release pipeline.
|
||||
|
||||
2. If it passes, the tarball(s) and signature(s) will be available as
|
||||
artifacts and should be used for the release.
|
||||
|
||||
3. Put them on `dist.torproject.org`:
|
||||
|
||||
Upload the tarball and its sig to the dist website:
|
||||
|
||||
`rsync -avP tor-*.gz{,.asc} dist-master.torproject.org:/srv/dist-master.torproject.org/htdocs/`
|
||||
|
||||
Then, on dist-master.torproject.org, run:
|
||||
|
||||
`static-update-component dist.torproject.org`
|
||||
|
||||
For an alpha or latest stable, open an MR in
|
||||
https://gitlab.torproject.org/tpo/web/tpo that updates the
|
||||
`databags/versions.ini` to note the new version.
|
||||
|
||||
(NOTE: Due to #17805, there can only be one stable version listed at once.
|
||||
Nonetheless, do not call your version "alpha" if it is stable, or people
|
||||
will get confused.)
|
||||
|
||||
(NOTE: It will take a while for the website update scripts to update the
|
||||
website.)
|
||||
|
||||
|
||||
## 3. Post Process
|
||||
|
||||
Once the tarballs have been uploaded and are ready to be announced, we need to
|
||||
do the following:
|
||||
|
||||
1. Tag versions (`main` branch or `release` branch as appropriate) using
|
||||
`git tag -s tor-0.x.y.z-<status>` and then push the tag(s):
|
||||
`git push origin tor-0.x.y.z-<status>`
|
||||
|
||||
(This should be the `main` or `release` branch because that is the one
|
||||
from which the tarballs are built. We want our tags to match our
|
||||
tarballs.)
|
||||
|
||||
2. Merge upstream the artifacts from the `patches` job in the
|
||||
`Post-process` stage of the CI release pipeline.
|
||||
|
||||
Like step (2.1) above, the `-dev` version bump needs to be done manually
|
||||
with a `git merge -s ours`.
|
||||
|
||||
3. Write and post the release announcement for the `forum.torproject.net`
|
||||
in the `News -> Tor Release Announcement` category.
|
||||
|
||||
If possible, mention which Tor Browser version (and its expected dates) will
include this Tor release. This usually only applies to the latest stable.
|
||||
|
||||
4. Inform `tor-announce@lists.torproject.org` with the release announcement
pointing to the Forum. Append the ChangeLog there. We do this until we can
automate such posts from the forum directly.
|
||||
|
||||
5. Update torproject.org website by submitting a MR to
|
||||
https://gitlab.torproject.org/tpo/web/tpo
|
||||
|
||||
The `databags/versions.ini` file is the one to change with the newly
|
||||
released version(s).
|
||||
|
||||
### New Stable
|
||||
|
||||
1. Create the `maint-x.y.z` and `release-x.y.z` branches at the version
|
||||
tag. Then update the `./scripts/git/git-list-tor-branches.sh` with the
|
||||
new version.
|
||||
|
||||
2. Update `./scripts/git/git-list-tor-branches.sh` and
|
||||
`./scripts/ci/ci-driver.sh` with the new version in `maint-x.y.z` and
|
||||
then merge forward into main. (If you haven't pushed the new branches
remotely yet, merge the local branches.)
|
||||
|
||||
3. In `main`, bump version to the next series: `tor-x.y.0-alpha-dev` and
|
||||
then tag it: `git tag -s tor-x.y.0-alpha-dev`
|
||||
|
||||
## Appendix: An alternative means to notify packagers
|
||||
|
||||
If for some reason you need to contact a bunch of packagers without
|
||||
using the publicly archived tor-packagers list, you can try these
|
||||
people:
|
||||
|
||||
- {weasel,sysrqb,mikeperry} at torproject dot org
|
||||
- {blueness} at gentoo dot org
|
||||
- {paul} at invizbox dot io
|
||||
- {vincent} at invizbox dot com
|
||||
- {lfleischer} at archlinux dot org
|
||||
- {Nathan} at freitas dot net
|
||||
- {mike} at tig dot as
|
||||
- {tails-rm} at boum dot org
|
||||
- {simon} at sdeziel.info
|
||||
- {yuri} at freebsd.org
|
||||
- {mh+tor} at scrit.ch
|
||||
- {security} at brave.com
|
@ -1,255 +0,0 @@
|
||||
# CHECKLIST
|
||||
|
||||
Here's a summary checklist, with the things that Nick messes up most often.
|
||||
|
||||
Did you:
|
||||
|
||||
* [ ] Copy the ChangeLog to the ReleaseNotes?
|
||||
* [ ] Check that the new versions got approved?
|
||||
* [ ] Check the release date in the ChangeLog?
|
||||
* [ ] Update the GeoIP file?
|
||||
|
||||
# Putting out a new release
|
||||
|
||||
Here are the steps that the maintainer should take when putting out a
|
||||
new Tor release:
|
||||
|
||||
## 0. Preliminaries
|
||||
|
||||
1. Get at least three of weasel/arma/Sebastian/Sina to put the new
|
||||
version number in their approved versions list. Give them a few
|
||||
days to do this if you can.
|
||||
|
||||
2. If this is going to be an important security release, give the packagers
|
||||
advance warning, via `tor-packagers@lists.torproject.org`.
|
||||
|
||||
|
||||
3. Given the release date for Tor, ask the TB team about the likely release
|
||||
date of a TB that contains it. See note below in "commit, upload,
|
||||
announce".
|
||||
|
||||
## I. Make sure it works
|
||||
|
||||
1. Make sure that CI passes: have a look at the branches on gitlab.
|
||||
|
||||
_Optionally_, have a look at Travis
|
||||
(https://travis-ci.org/torproject/tor/branches), Appveyor
|
||||
(https://ci.appveyor.com/project/torproject/tor/history), and
|
||||
Jenkins (https://jenkins.torproject.org/view/tor/).
|
||||
Make sure you're looking at the right branches.
|
||||
|
||||
If there are any unexplained failures, try to fix them or figure them
|
||||
out.
|
||||
|
||||
2. Verify that there are no big outstanding issues. You might find such
|
||||
issues --
|
||||
|
||||
* On Gitlab
|
||||
|
||||
* On coverity scan
|
||||
|
||||
* On OSS-Fuzz
|
||||
|
||||
## II. Write a changelog
|
||||
|
||||
|
||||
1a. (Alpha release variant)
|
||||
|
||||
Gather the `changes/*` files into a changelog entry, rewriting many
|
||||
of them and reordering to focus on what users and funders would find
|
||||
interesting and understandable.
|
||||
|
||||
To do this, run `./scripts/maint/sortChanges.py changes/* > changelog.in`
|
||||
to combine headings and sort the entries. Copy the changelog.in file into
|
||||
the ChangeLog. Run `format_changelog.py --inplace` (see below) to clean up
|
||||
the line breaks.
|
||||
|
||||
Remove the `changes/*` files that you just merged into the ChangeLog.
|
||||
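A condensed sketch of that sequence:

```console
$ ./scripts/maint/sortChanges.py changes/* > changelog.in
$ # paste changelog.in at the top of ChangeLog, then:
$ ./scripts/maint/format_changelog.py --inplace
$ git rm changes/*
```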
|
||||
After that, it's time to hand-edit and fix the issues that
|
||||
lintChanges can't find:
|
||||
|
||||
1. Within each section, sort by "version it's a bugfix on", else by
|
||||
numerical ticket order.
|
||||
|
||||
2. Clean them up:
|
||||
|
||||
Make stuff very terse
|
||||
|
||||
Describe the user-visible problem right away
|
||||
|
||||
Mention relevant config options by name. If they're rare or unusual,
|
||||
remind people what they're for
|
||||
|
||||
Avoid starting lines with open-paren
|
||||
|
||||
Present and imperative tense: not past.
|
||||
|
||||
"Relays", not "servers" or "nodes" or "Tor relays".
|
||||
|
||||
"Onion services", not "hidden services".
|
||||
|
||||
"Stop FOOing", not "Fix a bug where we would FOO".
|
||||
|
||||
Try not to let any given section be longer than about a page. Break up
|
||||
long sections into subsections by some sort of common subtopic. This
|
||||
guideline is especially important when organizing Release Notes for
|
||||
new stable releases.
|
||||
|
||||
If a given changes stanza showed up in a different release (e.g.
|
||||
maint-0.2.1), be sure to make the stanzas identical (so people can
|
||||
distinguish if these are the same change).
|
||||
|
||||
3. Clean everything one last time.
|
||||
|
||||
4. Run `./scripts/maint/format_changelog.py --inplace` to make it prettier
|
||||
|
||||
1b. (old-stable release variant)
|
||||
|
||||
For stable releases that backport changes from later series, we try to keep
the changelog entries identical to their original versions, with a "backport
from 0.x.y.z" note added to each section. So in this case, once you have the items
|
||||
from the changes files copied together, don't use them to build a new
|
||||
changelog: instead, look up the corrected versions that were merged
|
||||
into ChangeLog in the main branch, and use those.
|
||||
|
||||
Add "backport from X.Y.Z" in the section header for these entries.
|
||||
|
||||
2. Compose a short release blurb to highlight the user-facing
|
||||
changes. Insert said release blurb into the ChangeLog stanza. If it's
|
||||
a stable release, add it to the ReleaseNotes file too. If we're adding
|
||||
to a release-* branch, manually commit the changelogs to the later
|
||||
git branches too.
|
||||
|
||||
3. If there are changes that require or suggest operator intervention
|
||||
before or during the update, mail operators (either dirauth or relays
|
||||
list) with a headline that indicates that an action is required or
|
||||
appreciated.
|
||||
|
||||
4. If you're doing the first stable release in a series, you need to
|
||||
create a ReleaseNotes for the series as a whole. To get started
|
||||
there, copy all of the Changelog entries from the series into a new
|
||||
file, and run `./scripts/maint/sortChanges.py` on it. That will
|
||||
group them by category. Then kill every bugfix entry for fixing
|
||||
bugs that were introduced within that release series; those aren't
|
||||
relevant changes since the last series. At that point, it's time
|
||||
to start sorting and condensing entries. (Generally, we don't edit the
|
||||
text of existing entries, though.)
|
||||
|
||||
## III. Making the source release.
|
||||
|
||||
1. In `maint-0.?.x`, bump the version number in `configure.ac` and run
|
||||
`./scripts/maint/update_versions.py` to update version numbers in other
|
||||
places, and commit. Then merge `maint-0.?.x` into `release-0.?.x`.
|
||||
|
||||
When you merge the maint branch forward to the next maint branch, or into
|
||||
main, merge it with `-s ours` to avoid conflict with the version
|
||||
bump.
|
||||
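For example, a sketch for a hypothetical 0.4.7 stable series (branch and
version names are illustrative):

```console
$ git checkout maint-0.4.7
$ # bump the version in configure.ac, then:
$ ./scripts/maint/update_versions.py
$ git commit -am "Bump version to 0.4.7.x"
$ git checkout release-0.4.7 && git merge maint-0.4.7
$ # merge forward with "-s ours" so the version bump itself is not taken:
$ git checkout maint-0.4.8 && git merge -s ours maint-0.4.7
```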
|
||||
2. In `release-0.?.x`, run `make distcheck`, put the tarball up somewhere
(how about your homedir on people.torproject.org?), and tell `#tor-dev`
|
||||
about it.
|
||||
|
||||
If you want, wait until at least one person has built it
|
||||
successfully. (We used to say "wait for others to test it", but our
|
||||
CI has successfully caught these kinds of errors for the last several
|
||||
years.)
|
||||
|
||||
3. Make sure that the new version is recommended in the latest consensus.
|
||||
(Otherwise, users will get confused when it complains to them
|
||||
about its status.)
|
||||
|
||||
If it is not, you'll need to poke Roger, Weasel, Sebastian, and Sina
|
||||
again: see the note at the start of the document.
|
||||
|
||||
## IV. Commit, upload, announce
|
||||
|
||||
1. Sign the tarball, then sign and push the git tag:
|
||||
|
||||
```console
|
||||
$ gpg -ba <the_tarball>
|
||||
$ git tag -s tor-0.4.x.y-<status>
|
||||
$ git push origin tag tor-0.4.x.y-<status>
|
||||
```
|
||||
|
||||
(You must do this before you update the website: the website scripts
|
||||
rely on finding the version by tag.)
|
||||
|
||||
(If your default PGP key is not the one you want to sign with, then say
|
||||
"-u <keyid>" instead of "-s".)
|
||||
|
||||
2. scp the tarball and its sig to the dist website, i.e.
|
||||
`/srv/dist-master.torproject.org/htdocs/` on dist-master. Run
|
||||
"static-update-component dist.torproject.org" on dist-master.
|
||||
|
||||
In the `project/web/tpo.git` repository, update `databags/versions.ini`
|
||||
to note the new version. Push these changes to `master`.
|
||||
|
||||
(NOTE: Due to #17805, there can only be one stable version listed at
|
||||
once. Nonetheless, do not call your version "alpha" if it is stable,
|
||||
or people will get confused.)
|
||||
|
||||
(NOTE: It will take a while for the website update scripts to update
|
||||
the website.)
|
||||
|
||||
3. Email the tor-packagers@lists.torproject.org mailing list to tell them
about the new release.
|
||||
|
||||
Mention where to download the tarball (`https://dist.torproject.org/`).
|
||||
|
||||
Include a link to the changelog.
|
||||
|
||||
4. Wait for the download page to be updated. (If you don't do this before you
|
||||
announce, people will be confused.)
|
||||
|
||||
5. Mail the release blurb and ChangeLog to tor-talk (development release) or
|
||||
tor-announce (stable).
|
||||
|
||||
Post the changelog on the blog as well. You can generate a
|
||||
blog-formatted version of the changelog with
|
||||
`./scripts/maint/format_changelog.py -B`
|
||||
|
||||
When you post, include an estimate of when the next TorBrowser
|
||||
releases will come out that include this Tor release. This will
|
||||
usually track https://wiki.mozilla.org/RapidRelease/Calendar , but it
|
||||
can vary.
|
||||
|
||||
For templates to use when announcing, see:
|
||||
https://gitlab.torproject.org/tpo/core/team/-/wikis/NetworkTeam/AnnouncementTemplates
|
||||
|
||||
## V. Aftermath and cleanup
|
||||
|
||||
1. If it's a stable release, bump the version number in the
|
||||
`maint-x.y.z` branch to "newversion-dev", and do a `merge -s ours`
|
||||
merge to avoid taking that change into main.
|
||||
|
||||
2. If there is a new `maint-x.y.z` branch, create a Travis CI cron job that
|
||||
builds the release every week. (It's ok to skip the weekly build if the
|
||||
branch was updated in the last 24 hours.)
|
||||
|
||||
3. Forward-port the ChangeLog (and ReleaseNotes if appropriate) to the
|
||||
main branch.
|
||||
|
||||
4. Keep an eye on the blog post, to moderate comments and answer questions.
|
||||
|
||||
## Appendix: An alternative means to notify packagers
|
||||
|
||||
If for some reason you need to contact a bunch of packagers without
|
||||
using the publicly archived tor-packagers list, you can try these
|
||||
people:
|
||||
|
||||
- {weasel,sysrqb,mikeperry} at torproject dot org
|
||||
- {blueness} at gentoo dot org
|
||||
- {paul} at invizbox dot io
|
||||
- {vincent} at invizbox dot com
|
||||
- {lfleischer} at archlinux dot org
|
||||
- {Nathan} at freitas dot net
|
||||
- {mike} at tig dot as
|
||||
- {tails-rm} at boum dot org
|
||||
- {simon} at sdeziel.info
|
||||
- {yuri} at freebsd.org
|
||||
- {mh+tor} at scrit.ch
|
||||
- {security} at brave.com
|
@ -1,518 +0,0 @@
|
||||
# Writing tests for Tor: an incomplete guide
|
||||
|
||||
Tor uses a variety of testing frameworks and methodologies to try to
|
||||
keep from introducing bugs. The major ones are:
|
||||
|
||||
1. Unit tests written in C and shipped with the Tor distribution.
|
||||
|
||||
2. Integration tests written in Python 2 (>= 2.7) or Python 3
|
||||
(>= 3.1) and shipped with the Tor distribution.
|
||||
|
||||
3. Integration tests written in Python and shipped with the Stem
|
||||
library. Some of these use the Tor controller protocol.
|
||||
|
||||
4. System tests written in Python and SH, and shipped with the
|
||||
Chutney package. These work by running many instances of Tor
|
||||
locally, and sending traffic through them.
|
||||
|
||||
5. The Shadow network simulator.
|
||||
|
||||
## How to run these tests
|
||||
|
||||
### The easy version
|
||||
|
||||
To run all the tests that come bundled with Tor, run `make check`.
|
||||
|
||||
To run the Stem tests as well, fetch stem from the git repository,
|
||||
set `STEM_SOURCE_DIR` to the checkout, and run `make test-stem`.
|
||||
|
||||
To run the Chutney tests as well, fetch chutney from the git repository,
|
||||
set `CHUTNEY_PATH` to the checkout, and run `make test-network`.
|
||||
|
||||
To run all of the above, run `make test-full`.
|
||||
|
||||
To run all of the above, plus tests that require a working connection to the
|
||||
internet, run `make test-full-online`.
|
||||
|
||||
### Running particular subtests
|
||||
|
||||
The Tor unit tests are divided into separate programs and a couple of
|
||||
bundled unit test programs.
|
||||
|
||||
Separate programs are easy. For example, to run the memwipe tests in
|
||||
isolation, you just run `./src/test/test-memwipe`.
|
||||
|
||||
To run tests within the unit test programs, you can specify the name
|
||||
of the test. The string ".." can be used as a wildcard at the end of the
|
||||
test name. For example, to run all the cell format tests, enter
|
||||
`./src/test/test cellfmt/..`.
|
||||
|
||||
Many tests that need to mess with global state run in forked subprocesses in
|
||||
order to keep from contaminating one another. But when debugging a failing test,
|
||||
you might want to run it without forking a subprocess. To do so, use the
|
||||
`--no-fork` option with a single test. (If you specify it along with
|
||||
multiple tests, they might interfere.)
|
||||
|
||||
You can turn on logging in the unit tests by passing one of `--debug`,
|
||||
`--info`, `--notice`, or `--warn`. By default only errors are displayed.
|
||||
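For example, to run a single test without forking and with info-level logging
(the test name here is only illustrative):

```console
$ ./src/test/test --info --no-fork util/time
```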
|
||||
Unit tests are divided into `./src/test/test` and `./src/test/test-slow`.
|
||||
The former are those that should finish in a few seconds; the latter tend to
|
||||
take more time, and may include CPU-intensive operations, deliberate delays,
|
||||
and stuff like that.
|
||||
|
||||
## Finding test coverage
|
||||
|
||||
Test coverage is a measurement of which lines your tests actually visit.
|
||||
|
||||
When you configure Tor with the `--enable-coverage` option, it should
|
||||
build with support for coverage in the unit tests, and in a special
|
||||
`tor-cov` binary.
|
||||
|
||||
Then, run the tests you'd like to see coverage from. If you have old
|
||||
coverage output, you may need to run `reset-gcov` first.
|
||||
|
||||
Now you've got a bunch of files scattered around your build directories
|
||||
called `*.gcda`. In order to extract the coverage output from them, make a
|
||||
temporary directory for them and run `./scripts/test/coverage ${TMPDIR}`,
|
||||
where `${TMPDIR}` is the temporary directory you made. This will create a
|
||||
`.gcov` file for each source file under tests, containing that file's source
|
||||
annotated with the number of times the tests hit each line. (You'll need to
|
||||
have gcov installed.)
|
||||
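Putting those steps together, a sketch (the temporary directory name is
arbitrary):

```console
$ ./configure --enable-coverage && make check
$ mkdir /tmp/tor-coverage
$ ./scripts/test/coverage /tmp/tor-coverage
```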
|
||||
You can get a summary of the test coverage for each file by running
|
||||
`./scripts/test/cov-display ${TMPDIR}/*` . Each line lists the file's name,
|
||||
the number of uncovered lines, the number of covered lines, and the
|
||||
coverage percentage.
|
||||
|
||||
For a summary of the test coverage for each _function_, run
|
||||
`./scripts/test/cov-display -f ${TMPDIR}/*`.
|
||||
|
||||
For more details on using gcov, including the helper scripts in
|
||||
scripts/test, see HelpfulTools.md.
|
||||
|
||||
### Comparing test coverage
|
||||
|
||||
Sometimes it's useful to compare test coverage for a branch you're writing to
|
||||
coverage from another branch (such as git master, for example). But you
|
||||
can't run `diff` on the two coverage outputs directly, since the actual
|
||||
number of times each line is executed aren't so important, and aren't wholly
|
||||
deterministic.
|
||||
|
||||
Instead, follow the instructions above for each branch, creating a separate
|
||||
temporary directory for each. Then, run `./scripts/test/cov-diff ${D1}
|
||||
${D2}`, where D1 and D2 are the directories you want to compare. This will
|
||||
produce a diff of the two directories, with all lines normalized to be either
|
||||
covered or uncovered.
|
||||
|
||||
To count new or modified uncovered lines in D2, you can run:
|
||||
|
||||
```console
$ ./scripts/test/cov-diff ${D1} ${D2} | grep '^+ *\#' | wc -l
```
|
||||
|
||||
## Marking lines as unreachable by tests
|
||||
|
||||
You can mark a specific line as unreachable by using the special
|
||||
string LCOV_EXCL_LINE. You can mark a range of lines as unreachable
|
||||
with LCOV_EXCL_START... LCOV_EXCL_STOP. Note that older versions of
|
||||
lcov don't understand these lines.
|
||||
|
||||
You can post-process .gcov files to make these lines 'unreached' by
|
||||
running ./scripts/test/cov-exclude on them. It marks excluded
|
||||
unreached lines with 'x', and excluded reached lines with '!!!'.
|
||||
|
||||
Note: you should never do this unless the line is meant to be 100%
|
||||
unreachable by actual code.
|
||||
|
||||
## What kinds of test should I write?
|
||||
|
||||
Integration testing and unit testing are complementary: it's probably a
|
||||
good idea to make sure that your code is hit by both if you can.
|
||||
|
||||
If your code is very-low level, and its behavior is easily described in
|
||||
terms of a relation between inputs and outputs, or a set of state
|
||||
transitions, then it's a natural fit for unit tests. (If not, please
|
||||
consider refactoring it until most of it _is_ a good fit for unit
|
||||
tests!)
|
||||
|
||||
If your code adds new externally visible functionality to Tor, it would
|
||||
be great to have a test for that functionality. That's where
|
||||
integration tests more usually come in.
|
||||
|
||||
## Unit and regression tests: Does this function do what it's supposed to?
|
||||
|
||||
Most of Tor's unit tests are made using the "tinytest" testing framework.
|
||||
You can see a guide to using it in the tinytest manual at
|
||||
|
||||
https://github.com/nmathewson/tinytest/blob/master/tinytest-manual.md
|
||||
|
||||
To add a new test of this kind, either edit an existing C file in `src/test/`,
|
||||
or create a new C file there. Each test is a single function that must
|
||||
be indexed in the table at the end of the file. We use the label "done:" as
|
||||
a cleanup point for all test functions.
|
||||
|
||||
If you have created a new test file, you will need to:
|
||||
1. Add the new test file to include.am
|
||||
2. In `test.h`, include the new test cases (testcase_t)
|
||||
3. In `test.c`, add the new test cases to testgroup_t testgroups
|
||||
|
||||
(Make sure you read `tinytest-manual.md` before proceeding.)
|
||||
|
||||
I use the terms "unit test" and "regression test" very sloppily here.
|
||||
|
||||
## A simple example
|
||||
|
||||
Here's an example of a test function for a simple function in util.c:
|
||||
|
||||
```c
static void
test_util_writepid(void *arg)
{
  (void) arg;

  char *contents = NULL;
  const char *fname = get_fname("tmp_pid");
  unsigned long pid;
  char c;

  write_pidfile(fname);

  contents = read_file_to_str(fname, 0, NULL);
  tt_assert(contents);

  int n = sscanf(contents, "%lu\n%c", &pid, &c);
  tt_int_op(n, OP_EQ, 1);
  tt_int_op(pid, OP_EQ, getpid());

 done:
  tor_free(contents);
}
```
|
||||
|
||||
This should look pretty familiar to you if you've read the tinytest
|
||||
manual. One thing to note here is that we use the testing-specific
|
||||
function `get_fname` to generate a file with respect to a temporary
|
||||
directory that the tests use. You don't need to delete the file;
|
||||
it will get removed when the tests are done.
|
||||
|
||||
Also note our use of `OP_EQ` instead of `==` in the `tt_int_op()` calls.
|
||||
We define `OP_*` macros to use instead of the binary comparison
|
||||
operators so that analysis tools can more easily parse our code.
|
||||
(Coccinelle really hates to see `==` used as a macro argument.)
|
||||
|
||||
Finally, remember that by convention, all `*_free()` functions that
|
||||
Tor defines are defined to accept NULL harmlessly. Thus, you don't
|
||||
need to say `if (contents)` in the cleanup block.
|
||||
|
||||
## Exposing static functions for testing
|
||||
|
||||
Sometimes you need to test a function, but you don't want to expose
|
||||
it outside its usual module.
|
||||
|
||||
To support this, Tor's build system compiles a testing version of
|
||||
each module, with extra identifiers exposed. If you want to
|
||||
declare a function as static but available for testing, use the
|
||||
macro `STATIC` instead of `static`. Then, make sure there's a
|
||||
macro-protected declaration of the function in the module's header.
|
||||
|
||||
For example, `crypto_curve25519.h` contains:
|
||||
|
||||
```c
#ifdef CRYPTO_CURVE25519_PRIVATE
STATIC int curve25519_impl(uint8_t *output, const uint8_t *secret,
                           const uint8_t *basepoint);
#endif
```
|
||||
|
||||
The `crypto_curve25519.c` file and the `test_crypto.c` file both define
|
||||
`CRYPTO_CURVE25519_PRIVATE`, so they can see this declaration.
|
||||
|
||||
## STOP! Does this test really test?
|
||||
|
||||
When writing tests, it's not enough to just generate coverage on all the
|
||||
lines of the code that you're testing: It's important to make sure that
|
||||
the test _really tests_ the code.
|
||||
|
||||
For example, here is a _bad_ test for the unlink() function (which is
|
||||
supposed to remove a file).
|
||||
|
||||
```c
static void
test_unlink_badly(void *arg)
{
  (void) arg;
  int r;

  const char *fname = get_fname("tmpfile");

  /* If the file isn't there, unlink returns -1 and sets ENOENT */
  r = unlink(fname);
  tt_int_op(r, OP_EQ, -1);
  tt_int_op(errno, OP_EQ, ENOENT);

  /* If the file DOES exist, unlink returns 0. */
  write_str_to_file(fname, "hello world", 0);
  r = unlink(fname);
  tt_int_op(r, OP_EQ, 0);

 done:
  ;
}
```
|
||||
|
||||
This test might get very high coverage on unlink(). So why is it a
|
||||
bad test? Because it doesn't check that unlink() *actually removes the
|
||||
named file*!
|
||||
|
||||
Remember, the purpose of a test is to succeed if the code does what
|
||||
it's supposed to do, and fail otherwise. Try to design your tests so
|
||||
that they check for the code's intended and documented functionality
|
||||
as much as possible.
|
||||
|
||||
## Mock functions for testing in isolation
|
||||
|
||||
Often we want to test that a function works right, but the function to
|
||||
be tested depends on other functions whose behavior is hard to observe,
|
||||
or which require a working Tor network, or something like that.
|
||||
|
||||
To write tests for this case, you can replace the underlying functions
|
||||
with testing stubs while your unit test is running. You need to declare
|
||||
the underlying function as 'mockable', as follows:
|
||||
|
||||
```c
|
||||
MOCK_DECL(returntype, functionname, (argument list));
|
||||
```
|
||||
|
||||
and then later implement it as:
|
||||
|
||||
```c
|
||||
MOCK_IMPL(returntype, functionname, (argument list))
|
||||
{
|
||||
/* implementation here */
|
||||
}
|
||||
```
|
||||
|
||||
For example, if you had a 'connect to remote server' function, you could
|
||||
declare it as:
|
||||
|
||||
```c
|
||||
MOCK_DECL(int, connect_to_remote, (const char *name, status_t *status));
|
||||
```
|
||||
|
||||
When you declare a function this way, it will be declared as normal in
|
||||
regular builds, but when the module is built for testing, it is declared
|
||||
as a function pointer initialized to the actual implementation.
|
||||
|
||||
In your tests, if you want to override the function with a temporary
|
||||
replacement, you say:
|
||||
|
||||
```c
|
||||
MOCK(functionname, replacement_function_name);
|
||||
```
|
||||
|
||||
And later, you can restore the original function with:
|
||||
|
||||
```c
|
||||
UNMOCK(functionname);
|
||||
```
|
||||
|
||||
For more information, see the definitions of this mocking logic in
|
||||
`testsupport.h`.
|
||||
|
||||
## Okay but what should my tests actually do?
|
||||
|
||||
We talk above about "test coverage" -- making sure that your tests visit
|
||||
every line of code, or every branch of code. But visiting the code isn't
|
||||
enough: we want to verify that it's correct.
|
||||
|
||||
So when writing tests, try to make tests that should pass with any correct
|
||||
implementation of the code, and that should fail if the code doesn't do what
|
||||
it's supposed to do.
|
||||
|
||||
You can write "black-box" tests or "glass-box" tests. A black-box test is
|
||||
one that you write without looking at the structure of the function. A
|
||||
glass-box one is one you implement while looking at how the function is
|
||||
implemented.
|
||||
|
||||
In either case, make sure to consider common cases *and* edge cases; success
|
||||
cases and failure cases.
|
||||
|
||||
For example, consider testing this function:
|
||||
|
||||
```c
/** Remove all elements E from sl such that E==element. Preserve
 * the order of any elements before E, but elements after E can be
 * rearranged.
 */
void smartlist_remove(smartlist_t *sl, const void *element);
```
|
||||
|
||||
In order to test it well, you should write tests for at least all of the
|
||||
following cases. (These would be black-box tests, since we're only looking
|
||||
at the declared behavior of the function.)
|
||||
|
||||
* Remove an element that is in the smartlist.
|
||||
* Remove an element that is not in the smartlist.
|
||||
* Remove an element that appears in the smartlist more than once.
|
||||
|
||||
And your tests should verify that it behaves correctly. At minimum, you should
|
||||
test:
|
||||
|
||||
* That other elements before E are in the same order after you call the
|
||||
functions.
|
||||
* That the target element is really removed.
|
||||
* That _only_ the target element is removed.
|
||||
|
||||
When you consider edge cases, you might try:
|
||||
|
||||
* Remove an element from an empty list.
|
||||
* Remove an element from a singleton list containing that element.
|
||||
* Remove an element from a list containing several instances of that
|
||||
element, and nothing else.
|
||||
|
||||
Now let's look at the implementation:
|
||||
|
||||
```c
void
smartlist_remove(smartlist_t *sl, const void *element)
{
  int i;
  if (element == NULL)
    return;
  for (i=0; i < sl->num_used; i++)
    if (sl->list[i] == element) {
      sl->list[i] = sl->list[--sl->num_used]; /* swap with the end */
      i--; /* so we process the new i'th element */
      sl->list[sl->num_used] = NULL;
    }
}
```
|
||||
|
||||
Based on the implementation, we now see three more edge cases to test:
|
||||
|
||||
* Removing NULL from the list.
|
||||
* Removing an element from the end of the list
|
||||
* Removing an element from a position other than the end of the list.
|
||||
|
||||
## What should my tests NOT do?
|
||||
|
||||
Tests shouldn't require a network connection.
|
||||
|
||||
Whenever possible, tests shouldn't take more than a second. Put the test
|
||||
into test-slow if it genuinely needs more time to run.
|
||||
|
||||
Tests should not alter global state unless they run with `TT_FORK`: Tests
|
||||
should not require other tests to be run before or after them.
|
||||
|
||||
Tests should not leak memory or other resources. To find out if your tests
|
||||
are leaking memory, run them under valgrind (see HelpfulTools.txt for more
|
||||
information on how to do that).
|
||||
|
||||
When possible, tests should not be over-fit to the implementation. That is,
|
||||
the test should verify that the documented behavior is implemented, but
|
||||
should not break if other permissible behavior is later implemented.
|
||||
|
||||
## Advanced techniques: Namespaces
|
||||
|
||||
Sometimes, when you're doing a lot of mocking at once, it's convenient to
|
||||
isolate your identifiers within a single namespace. If this were C++, we'd
|
||||
already have namespaces, but for C, we do the best we can with macros and
|
||||
token-pasting.
|
||||
|
||||
We have some macros defined for this purpose in `src/test/test.h`. To use
|
||||
them, you define `NS_MODULE` to a prefix to be used for your identifiers, and
|
||||
then use other macros in place of identifier names. See `src/test/test.h` for
|
||||
more documentation.
|
||||
|
||||
## Integration tests: Calling Tor from the outside
|
||||
|
||||
Some tests need to invoke Tor from the outside, and shouldn't run from the
|
||||
same process as the Tor test program. Reasons for doing this might include:
|
||||
|
||||
* Testing the actual behavior of Tor when run from the command line
|
||||
* Testing that a crash-handler correctly logs a stack trace
|
||||
* Verifying that violating a sandbox or capability requirement will
|
||||
actually crash the program.
|
||||
* Needing to run as root in order to test capability inheritance or
|
||||
user switching.
|
||||
|
||||
To add one of these, you generally want a new C program in `src/test`. Add it
|
||||
to `TESTS` and `noinst_PROGRAMS` if it can run on its own and return success or
|
||||
failure. If it needs to be invoked multiple times, or it needs to be
|
||||
wrapped, add a new shell script to `TESTS`, and the new program to
|
||||
`noinst_PROGRAMS`. If you need access to any environment variable from the
|
||||
makefile (eg `${PYTHON}` for a python interpreter), then make sure that the
|
||||
makefile exports them.
|
||||
|
||||
## Writing integration tests with Stem
|
||||
|
||||
The 'stem' library includes extensive tests for the Tor controller protocol.
|
||||
You can run stem tests from tor with `make test-stem`, or see
|
||||
`https://stem.torproject.org/faq.html#how-do-i-run-the-tests`.
|
||||
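For example (a sketch; the checkout location is just an assumption):

```console
$ git clone https://gitlab.torproject.org/tpo/network-health/stem.git ~/src/stem
$ STEM_SOURCE_DIR=~/src/stem make test-stem
```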
|
||||
To see what tests are available, have a look around the `test/*` directory in
|
||||
stem. The first thing you'll notice is that there are both `unit` and `integ`
|
||||
tests. The former are for tests of the facilities provided by stem itself that
|
||||
can be tested on their own, without the need to hook up a tor process. These
|
||||
are less relevant, unless you want to develop a new stem feature. The latter,
|
||||
however, are a very useful tool to write tests for controller features. They
|
||||
provide a default environment with a connected tor instance that can be
|
||||
modified and queried. Adding more integration tests is a great way to increase
|
||||
the test coverage inside Tor, especially for controller features.
|
||||
|
||||
Let's assume you actually want to write a test for a previously untested
|
||||
controller feature. I'm picking the `exit-policy/*` GETINFO queries. Since
|
||||
these are a controller feature that we want to write an integration test for,
|
||||
the right file to modify is
|
||||
`https://gitlab.torproject.org/tpo/network-health/stem/-/blob/master/test/integ/control/controller.py`.
|
||||
|
||||
First off we notice that there is an integration test called
|
||||
`test_get_exit_policy()` that's already written. This exercises the interaction
|
||||
of stem's `Controller.get_exit_policy()` method, and is not relevant for our
|
||||
test since there are no stem methods to make use of all `exit-policy/*`
|
||||
queries (if there were, likely they'd be tested already. Maybe you want to
|
||||
write a stem feature, but I chose to just add tests).
|
||||
|
||||
Our test requires a tor controller connection, so we'll use the
|
||||
`@require_controller` annotation for our `test_exit_policy()` method. We need a
|
||||
controller instance, which we get from
|
||||
`test.runner.get_runner().get_tor_controller()`. The attached Tor instance is
|
||||
configured as a client, but the exit-policy GETINFO queries need a relay to
|
||||
work, so we have to change the config (using `controller.set_options()`). This
|
||||
is OK for us to do, we just have to remember to set DisableNetwork so we don't
|
||||
actually start an exit relay and also to undo the changes we made (by calling
|
||||
`controller.reset_conf()` at the end of our test). Additionally, we have to
|
||||
configure a static Address for Tor to use, because it refuses to build a
|
||||
descriptor when it can't guess a suitable IP address. Unfortunately, these
|
||||
kinds of tripwires are everywhere. Don't forget to file appropriate tickets if
|
||||
you notice any strange behaviour that seems totally unreasonable.
|
||||
|
||||
Check out the `test_exit_policy()` function in abovementioned file to see the
|
||||
final implementation for this test.
|
||||
|
||||
## System testing with Chutney
|
||||
|
||||
The 'chutney' program configures and launches a set of Tor relays,
|
||||
authorities, and clients on your local host. It has a `test network`
|
||||
functionality to send traffic through them and verify that the traffic
|
||||
arrives correctly.
|
||||
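For example, a sketch of running the bundled network tests against a chutney
checkout (the checkout location is an assumption):

```console
$ git clone https://gitlab.torproject.org/tpo/core/chutney.git ~/src/chutney
$ CHUTNEY_PATH=~/src/chutney make test-network
```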
|
||||
You can write new test networks by adding them to `networks`. To add
|
||||
them to Tor's tests, add them to the `test-network` or `test-network-all`
|
||||
targets in `Makefile.am`.
|
||||
|
||||
(Adding new kinds of program to chutney will still require hacking the
|
||||
code.)
|
||||
|
||||
## Other integration tests
|
||||
|
||||
It's fine to write tests that use a POSIX shell to invoke Tor or test other
|
||||
aspects of the system. When you do this, have a look at our existing tests
|
||||
of this kind in `src/test/` to make sure that you haven't forgotten anything
|
||||
important. For example: it can be tricky to make sure you're invoking Tor at
|
||||
the right path in various build scenarios.
|
||||
|
||||
We use a POSIX shell whenever possible here, and we use the shellcheck tool
|
||||
to make sure that our scripts are portable. We should only require bash for
|
||||
scripts that are developer-only.
|
@ -1,105 +0,0 @@
|
||||
# Using `simpleperf` to collect CPU profiling on Android
|
||||
|
||||
This document describes how you can use Android's `simpleperf`
|
||||
command-line tool to get CPU profiling information from Tor via the
|
||||
Orbot application. The tool is particularly useful for Tor development
|
||||
because it is able to profile native applications on the platform
|
||||
whereas a lot of the normal tooling for the Android platform is only
|
||||
able to collect information from Java-based applications.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before using `simpleperf` there are a couple of steps that must be
|
||||
followed. You should make sure you have both a recent installation of
|
||||
the Android Software Development Kit (SDK) and Native Development Kit
|
||||
(NDK) installed. These can be found on the Android Developers website.
|
||||
|
||||
1. Follow the build instructions from the `BUILD` file in the Orbot
|
||||
repository and build an Orbot APK (Android Package) file with
|
||||
debugging enabled. Make sure that when you build the native content of
|
||||
the Orbot application that you run the `make -C external` command with
|
||||
an additional `DEBUG=1` as parameter to ensure that the Orbot build
|
||||
process does not strip the debug symbols from the Tor binary.
|
||||
|
||||
2. (Optional) Uninstall and clean-up your old Orbot installation that
|
||||
is most likely downloaded from Google's Play Store or via fdroid:
|
||||
|
||||
$ adb shell pm clear org.torproject.android
|
||||
$ adb uninstall org.torproject.android
|
||||
|
||||
3. Install the Android Package you generated in step 1:
|
||||
|
||||
```bash
|
||||
$ adb install /path/to/your/app-fullperm-debug.apk
|
||||
```
|
||||
|
||||
4. Check on your device that the newly installed Orbot actually works
|
||||
and behaves in the way you expect it to.
|
||||
|
||||
## Profiling using `simpleperf`
|
||||
|
||||
The `simpleperf` tool can be found in the `simpleperf/` directory in
|
||||
the directory where you installed the Android NDK to. In this
|
||||
directory there is a set of Python files that will help you deploy the
|
||||
tool to a device and collect the measurement data such that you can
|
||||
analyze the results on your computer rather than on your phone.
|
||||
|
||||
1. Change directory to the location of the `simpleperf` directory.
|
||||
2. Open the `app_profiler.config` file and change
|
||||
`app_package_name` to `org.torproject.android`, and `apk_file_path` to
|
||||
the path of your Orbot Android Package (APK file).
|
||||
3. Optionally change the duration parameter in the `record_options`
|
||||
variable in `app_profiler.config` to the duration which you would like
|
||||
to collect samples in. The value is specified in seconds.
|
||||
4. Run the app profiler using `python app_profiler.py`. This helper
|
||||
script will push the `simpleperf` tool to your device, start the
|
||||
profiler, and once it has completed copy the generated `perf.data`
|
||||
file over to your computer with the results.
|
||||
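A sketch of that sequence (paths are illustrative):

```console
$ cd /path/to/android-ndk/simpleperf
$ # edit app_profiler.config as described above, then:
$ python app_profiler.py
```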
|
||||
### Analyzing the results
|
||||
|
||||
You can inspect your resulting `perf.data` file via a simple GUI
|
||||
program `python report.py` or via the command-line tool `simpleperf
|
||||
report`. I've found the GUI tool to be easier to navigate around with
|
||||
than the command-line tool.
|
||||
|
||||
Passing the `-g` option to the command-line `simpleperf report` tool
allows you to see the call graph of functions and how much time
was spent in each call.
|
||||
|
||||
## Tips & Tricks
|
||||
|
||||
- When you have installed Orbot for the first time, you will notice that
if you get a shell on the Android device, there is no Tor binary
available. This is because Orbot unpacks the Tor binary the first time it
is executed and places it under the `app_bin/` directory on the
device.
|
||||
|
||||
To access binaries, `torrc` files, and other useful information on
|
||||
the device do the following:
|
||||
|
||||
```console
|
||||
$ adb shell
|
||||
(device):/ $ run-as org.torproject.android
|
||||
(device):/data/data/org.torproject.android $ ls
|
||||
app_bin app_data cache databases files lib shared_prefs
|
||||
```
|
||||
|
||||
Descriptors, control authentication cookie, state, and other files can be
|
||||
found in the `app_data` directory. The `torrc` can be found in the `app_bin/`
|
||||
directory.
|
||||
|
||||
- You can enable logging in Tor via the syslog (or android) log
|
||||
mechanism with:
|
||||
|
||||
```console
|
||||
$ adb shell
|
||||
(device):/ $ run-as org.torproject.android
|
||||
(device):/data/data/org.torproject.android $ echo -e "\nLog info syslog" >> app_bin/torrc
|
||||
```
|
||||
|
||||
Start Tor the normal way via Orbot and collect the logs from your computer using
|
||||
|
||||
```console
|
||||
$ adb logcat
|
||||
```
|
@ -1,139 +0,0 @@
|
||||
# Circuit Subsystem Trace Events
|
||||
|
||||
The circuit subsystem emits a series of tracing events related to a circuit
|
||||
object life cycle and its state change.
|
||||
|
||||
This document describes each event as in what data they record and what they
|
||||
represent.
|
||||
|
||||
## Background
|
||||
|
||||
There are two types of circuits: origin and OR (onion router). Both of them
|
||||
are derived from a base object called a general circuit.
|
||||
|
||||
- Origin circuits are the ones initiated by tor itself so client or onion
|
||||
service circuits for instance.
|
||||
|
||||
- OR circuits are the ones going through us that we have not initiated and
|
||||
thus only seen by relays.
|
||||
|
||||
Many operations are done on the base (general) circuit, and some are specific
|
||||
to an origin or OR. The following section describes each of them by circuit
|
||||
type.
|
||||
|
||||
## Trace Events
|
||||
|
||||
For the LTTng tracer, the subsystem name of these events is: `tor_circuit`.
|
||||
|
||||
Also, unless specified otherwise, every event emits a common set of parameters
|
||||
thus they should always be expected in the following order:
|
||||
|
||||
- `circ_id`: For an origin circuit, this is the global circuit identifier used
|
||||
in a cell. For an OR circuit, the value is 0.
|
||||
|
||||
- `purpose`: Purpose of the circuit as in what it is used for. Note that this
|
||||
can change during the lifetime of a circuit. See `CIRCUIT_PURPOSE_*` in
|
||||
`core/or/circuitlist.h` for an exhaustive list of the possible values.
|
||||
|
||||
- `state`: State of a circuit. This changes during the lifetime of a circuit.
|
||||
See `CIRCUIT_STATE_*` in `core/or/circuitlist.h` for an exhaustive list of
|
||||
the possible values.
|
||||
|
||||
Now, the tracing events.
|
||||
|
||||
### General Circuit (`circuit_t`)
|
||||
|
||||
The following events are triggered for the base circuit object and thus apply
|
||||
to all types of circuits.
|
||||
|
||||
* `free`: A circuit object is freed, that is, its memory is released and no
longer usable. After this event, no more events will be emitted for the
|
||||
specific circuit object.
|
||||
|
||||
* `mark_for_close`: A circuit object is marked for close, that is, scheduled
|
||||
to be closed in a later mainloop periodic event.
|
||||
|
||||
Extra parameters:
|
||||
|
||||
- `end_reason`: Reason why the circuit is closed. Tor sometimes changes that
reason to something generic in order to avoid leaking internal
|
||||
reasons to the end point. Thus, this value can be different from
|
||||
orig_close_reason.
|
||||
|
||||
- `orig_close_reason`: Original reason why the circuit is closed. That
|
||||
value never changes and contains the internal reason why we close it. It
|
||||
is **never** this reason that is sent back on the circuit.
|
||||
|
||||
* `change_purpose`: Purpose change.
|
||||
|
||||
Extra parameters:
|
||||
|
||||
(`purpose` parameter is not present)
|
||||
|
||||
- `old_purpose`: Previous purpose, which no longer applies.
|
||||
|
||||
- `new_purpose`: New purpose assigned to the circuit.
|
||||
|
||||
* `change_state`: State change.
|
||||
|
||||
Extra parameters:
|
||||
|
||||
(`state` parameter is not present)
|
||||
|
||||
- `old_state`: Previous state, which no longer applies.
|
||||
|
||||
- `new_state`: New state assigned to the circuit.
|
||||
|
||||
### Origin Circuit (`origin_circuit_t`)
|
||||
|
||||
The following events are triggered only for origin circuits.
|
||||
|
||||
* `new_origin`: New origin circuit has been created meaning it has been
|
||||
newly allocated, initialized and added to the global list.
|
||||
|
||||
* `establish`: Circuit is being established. This is the initial first step
|
||||
where the path was selected and a connection to the first hop has been
|
||||
launched.
|
||||
|
||||
* `cannibalized`: Circuit has been cannibalized. This happens when we have
|
||||
an already opened unused circuit (preemptive circuits) and it was picked.
|
||||
|
||||
* `first_onion_skin`: First onion skin was sent, that is, the handshake with
|
||||
the first hop.
|
||||
|
||||
Extra parameters:
|
||||
|
||||
- `fingerprint`: Identity digest (RSA) of the first hop.
|
||||
|
||||
* `intermediate_onion_skin`: An intermediate onion skin was sent, which can
be for any hop after the first one. There are thus `N - 1` of these events
|
||||
where `N` is the total number of hops in the path.
|
||||
|
||||
Extra parameters:
|
||||
|
||||
- `fingerprint`: Identity digest (RSA) of the next hop.
|
||||
|
||||
* `opened`: Circuit just became open, which means that all hops down the
|
||||
path have negotiated the handshake between them and us and the circuit is
|
||||
now ready to send cells.
|
||||
|
||||
* `timeout`: Circuit has timed out, that is, we waited too long for the
|
||||
circuit to be built.
|
||||
|
||||
* `idle_timeout`: Circuit has timed out due to idleness. This is controlled
|
||||
by the MaxCircuitDirtiness parameter which is 10 min by default.
|
||||
|
||||
For the common use case of a 3-hop circuit, the following events should be
|
||||
seen in this order:
|
||||
|
||||
`new_origin` -> `establish` -> `first_onion_skin` ->
|
||||
`intermediate_onion_skin` -> `intermediate_onion_skin` -> `opened`
|
||||
|
||||
### OR Circuit (`or_circuit_t`)
|
||||
|
||||
The following events are triggered only for OR circuits. For each of them, the
|
||||
`circ_id` parameter is not present since it would always be 0. The `purpose`
|
||||
and `state` remain.
|
||||
|
||||
* `new_or`: New OR circuit has been created meaning it has been newly
|
||||
allocated, initialized and added to the global list.
|
@ -1,163 +0,0 @@
|
||||
# Tracing
|
||||
|
||||
This document describes how the event tracing subsystem works in tor so
|
||||
developers can add events to the code base but also hook them to an event
|
||||
tracing framework (i.e. tracer).
|
||||
|
||||
## WARNING ##
|
||||
|
||||
Tracing the tor daemon **always** generates sensitive data if used in
|
||||
production (on the public network).
|
||||
|
||||
It **is** ethical for researchers to use tracing for their own tor client (for
|
||||
example: building paths, timings, or performance).
|
||||
|
||||
It is **NOT** ethical to archive, publish or keep data containing other users'
|
||||
activity such as relay data or anything that handles users' traffic. This
|
||||
of course includes any logs below notice level.
|
||||
|
||||
Publishing analysis of tracing data containing user traffic is **NOT** safe
|
||||
either.
|
||||
|
||||
In other words, tracing data that contains other users's activity is **NOT**
|
||||
safe to publish in any form.
|
||||
|
||||
## Basics ###
|
||||
|
||||
Tracing is separated in two different concepts. The tracing API and the
|
||||
tracing probes.
|
||||
|
||||
The API is in `src/lib/trace/` which defines how to call tracepoints in the
|
||||
tor code. Every C file should include `src/lib/trace/events.h` if it wants
|
||||
to call a tracepoint.
|
||||
|
||||
The probes are what actually record the tracepoint data. Because they often
|
||||
need to access specific subsystem objects, the probes are within each
|
||||
subsystem. They are defined in the `trace-probes-<subsystem>.c` files.
|
||||
|
||||
### Events
|
||||
|
||||
A trace event is basically a function to which we can pass any data that we
|
||||
want to collect. In addition, we specify a context for the event such as the
|
||||
subsystem and an event name.
|
||||
|
||||
A trace event in tor has the following standard format:
|
||||
|
||||
```c
|
||||
tor_trace(subsystem, event_name, args...);
|
||||
```
|
||||
|
||||
The `subsystem` parameter is the name of the subsystem the trace event is in.
|
||||
For example that could be "scheduler" or "vote" or "hs". The idea is to add
|
||||
some context to the event so when we collect them we know where it's coming
|
||||
from.
|
||||
|
||||
The `event_name` is the name of the event which adds better semantic to the
|
||||
event.
|
||||
|
||||
The `args` can be any number of arguments we want to collect.
|
||||
|
||||
Here is an example of a possible tracepoint in main():
|
||||
|
||||
```c
|
||||
tor_trace(main, init_phase, argc);
|
||||
```
|
||||
|
||||
The above is a tracepoint in the `main` subsystem with `init_phase` as the
|
||||
event name and the `int argc` is passed to the event as one argument.
|
||||
|
||||
How `argc` is collected or used has nothing to do with the instrumentation
|
||||
(adding trace events to the code). It is the work of the tracer so this is why
|
||||
the trace events and collection framework (tracer) are decoupled. You _can_
|
||||
have trace events without a tracer.
|
||||
|
||||
### Instrumentation ###
|
||||
|
||||
In `src/lib/trace/events.h`, we map the high level `tor_trace()` macro to one
|
||||
or more enabled instrumentations.
|
||||
|
||||
Currently, we have 3 types of possible instrumentation:
|
||||
|
||||
1. Debug
|
||||
|
||||
This will map every tracepoint to `log_debug()`. However, none of the
|
||||
arguments will be passed on because we don't know their types nor the string
|
||||
format of the debug log. The output is standardized like this:
|
||||
|
||||
```
|
||||
[debug] __FUNC__: Tracepoint <event_name> from subsystem <subsystem> hit.
|
||||
```
|
||||
|
||||
2. USDT
|
||||
|
||||
User Statically-Defined Tracing (USDT) is a kind of probe which can be
|
||||
handled by a variety of tracers such as SystemTap, DTrace, perf, eBPF and
|
||||
ftrace.
|
||||
|
||||
For each tracer, one will need to define the ABI in order for the tracer to
|
||||
be able to extract the data from the tracepoint objects. For instance, the
|
||||
tracer needs to know how to print the circuit state of a `circuit_t`
|
||||
object.
|
||||
|
||||
3. LTTng-UST
|
||||
|
||||
LTTng Userspace is a tracer that has its own type of instrumentation. The
probe definitions are created within the C code and are strongly typed.
|
||||
|
||||
For more information, see https://lttng.org/docs.
|
||||
|
||||
## Build System
|
||||
|
||||
This section describes how the instrumentation is integrated into the build
|
||||
system of tor.
|
||||
|
||||
By default, all tracing events are disabled in tor, that is, `tor_trace()` is
a NOP and thus has no execution-time cost.
|
||||
|
||||
To enable a specific instrumentation, there are configure options:
|
||||
|
||||
1. Debug: `--enable-tracing-instrumentation-debug`
|
||||
|
||||
2. USDT: `--enable-tracing-instrumentation-usdt`
|
||||
|
||||
3. LTTng: `--enable-tracing-instrumentation-lttng`
|
||||
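For example, a sketch enabling two of them at configure time:

```console
$ ./configure --enable-tracing-instrumentation-debug \
              --enable-tracing-instrumentation-usdt
```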
|
||||
They can all be used together or independently. If any of them is set, the
`HAVE_TRACING` define is set, and for each enabled instrumentation a
`USE_TRACING_INSTRUMENTATION_<type>` define is set as well.
|
||||
|
||||
## Adding a Tracepoint ##
|
||||
|
||||
This is pretty easy. Let's say you want to add a trace event in
|
||||
`src/feature/rend/rendcache.c`; you first need to include this file:
|
||||
|
||||
```c
|
||||
#include "lib/trace/events.h"
|
||||
```
|
||||
|
||||
Then, the `tor_trace()` macro can be used with the specific format detailed
|
||||
before in a previous section. As an example:
|
||||
|
||||
```c
|
||||
tor_trace(hs, store_desc_as_client, desc, desc_id);
|
||||
```
|
||||
|
||||
For `Debug` instrumentation, you have nothing else to do.
|
||||
|
||||
For `USDT` instrumentation, you will need to define the probes in a way the
specific tracer can understand. For instance, SystemTap requires you to define
a `tapset` for each tracepoint.
|
||||
|
||||
For `LTTng`, you will need to define the probes in the
|
||||
`trace-probes-<subsystem>.{c|h}` file. See the `trace-probes-circuit.{c|h}`
|
||||
file as an example and https://lttng.org/docs/v2.11/#doc-instrumenting.
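
The general shape of such a probe definition, following the LTTng-UST
documentation, looks roughly like the sketch below. The provider name
`tor_hs`, the header name, and the single `desc_id` field are hypothetical
placeholders, not the contents of tor's actual probe files:

```c
/* trace-probes-hs.h (hypothetical) */
#undef TRACEPOINT_PROVIDER
#define TRACEPOINT_PROVIDER tor_hs

#undef TRACEPOINT_INCLUDE
#define TRACEPOINT_INCLUDE "./trace-probes-hs.h"

#if !defined(TRACE_PROBES_HS_H) || defined(TRACEPOINT_HEADER_MULTI_READ)
#define TRACE_PROBES_HS_H

#include <lttng/tracepoint.h>

/* Strongly typed probe: the event takes one string argument. */
TRACEPOINT_EVENT(
    tor_hs,
    store_desc_as_client,
    TP_ARGS(const char *, desc_id),
    TP_FIELDS(
        ctf_string(desc_id, desc_id)
    )
)

#endif /* TRACE_PROBES_HS_H */

#include <lttng/tracepoint-event.h>
```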
|
||||
|
||||
## Performance ##
|
||||
|
||||
A word about performance when a tracepoint is enabled. One of the goals of a
|
||||
tracepoint (USDT, LTTng-UST, ...) is that it can be enabled or disabled. By
|
||||
default, they are disabled, which means the tracer will not record the data,
|
||||
but it still has to do a check, so the cost is basically that of a `branch`.
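
Conceptually (hypothetical names, not tor or tracer code), the disabled case
boils down to a single well-predicted branch:

```c
#include <stdbool.h>

/* Flag flipped by the tracer when a user enables the tracepoint. */
static volatile bool tracepoint_enabled = false;

static inline void
maybe_trace(int argc)
{
  if (tracepoint_enabled) {   /* the only cost when disabled: this check */
    /* ... hand the payload to the tracer ... */
    (void) argc;
  }
}
```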
|
||||
|
||||
If enabled, the performance depends on the tracer. In the case of
|
||||
LTTng-UST, recording one event costs around 110 nanoseconds.
|
85
doc/TUNING
85
doc/TUNING
@ -1,85 +0,0 @@
|
||||
Most operating systems limit the number of TCP sockets that can be used
|
||||
simultaneously. It is possible for a busy Tor relay to run into these
|
||||
limits, thus being unable to fully utilize the bandwidth resources it
|
||||
has at its disposal. The following system-specific tips might help
|
||||
to alleviate this problem.
|
||||
|
||||
Linux
|
||||
-----
|
||||
|
||||
Use 'ulimit -n' to raise the number of file descriptors that are allowed
|
||||
to be open at the same time.
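
For example, running 'ulimit -n 16384' in the shell that starts Tor raises
the limit to 16384 open file descriptors for that session (the value is
illustrative; pick one that suits your relay's load, and note that raising
the hard limit may require root).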
|
||||
|
||||
FreeBSD
|
||||
-------
|
||||
|
||||
Tune the following sysctl(8) variables:
|
||||
* kern.maxfiles - maximum allowed file descriptors (for entire system)
|
||||
* kern.maxfilesperproc - maximum file descriptors one process is allowed
|
||||
to use
|
||||
* kern.ipc.maxsockets - overall maximum numbers of sockets for entire
|
||||
system
|
||||
* kern.ipc.somaxconn - size of listen queue for incoming TCP connections
|
||||
for entire system
|
||||
|
||||
See also:
|
||||
* https://www.freebsd.org/doc/handbook/configtuning-kernel-limits.html
|
||||
* https://wiki.freebsd.org/NetworkPerformanceTuning
|
||||
|
||||
Mac OS X
|
||||
--------
|
||||
|
||||
Since Mac OS X is a BSD-based system, most of the above holds for OS X as well.
|
||||
However, launchd(8) is known to modify kern.maxfiles and kern.maxfilesperproc
|
||||
when it launches the tor service (see the launchd.plist(5) manpage). Also,
|
||||
kern.ipc.maxsockets is determined dynamically by the system and thus is
|
||||
read-only on OS X.
|
||||
|
||||
OpenBSD
|
||||
-------
|
||||
|
||||
Because OpenBSD is primarily focused on security and stability, it uses default
|
||||
resource limits stricter than those of more popular Unix-like operating systems.
|
||||
|
||||
OpenBSD stores a kernel-level file descriptor limit in the sysctl variable
|
||||
kern.maxfiles. It defaults to 7,030. To change it to, for example, 16,000 while
|
||||
the system is running, use the command 'sudo sysctl kern.maxfiles=16000'.
|
||||
kern.maxfiles will reset to the default value upon system reboot unless you also
|
||||
add 'kern.maxfiles=16000' to the file /etc/sysctl.conf.
|
||||
|
||||
There are stricter resource limits set on user classes, which are stored in
|
||||
/etc/login.conf. This config file also allows limit sets for daemons started
|
||||
with scripts in the /etc/rc.d directory, which presumably includes Tor.
|
||||
|
||||
To increase the file descriptor limit from its default of 1,024, add the
|
||||
following to /etc/login.conf:
|
||||
|
||||
tor:\
|
||||
:openfiles-max=13500:\
|
||||
:tc=daemon:
|
||||
|
||||
Upon restarting Tor, it will be able to open up to 13,500 file descriptors.
|
||||
|
||||
This will work *only* if you are starting Tor with the script /etc/rc.d/tor. If
|
||||
you're using a custom build instead of the package, you can easily copy the rc.d
|
||||
script from the Tor port directory. Alternatively, you can ensure that Tor's
|
||||
daemon user has its own user class and make a /etc/login.conf entry for it.
|
||||
|
||||
High-bandwidth relays sometimes give the syslog warning:
|
||||
|
||||
/bsd: WARNING: mclpools limit reached; increase kern.maxclusters
|
||||
|
||||
In this case, increase kern.maxclusters with the sysctl command and in the file
|
||||
/etc/sysctl.conf, as described with kern.maxfiles above. Use 'sysctl
|
||||
kern.maxclusters' to query the current value. Increasing by about 15% per day
|
||||
until the error no longer appears is a good guideline.
|
||||
|
||||
Disclaimer
|
||||
----------
|
||||
|
||||
Do note that this document is a draft and above information may be
|
||||
technically incorrect and/or incomplete. If so, please open a ticket
|
||||
on https://gitlab.torproject.org or post to tor-relays mailing list.
|
||||
|
||||
Are you running a busy Tor relay? Let us know how you are solving
|
||||
the out-of-sockets problem on your system.
|
@ -1,70 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Copyright (c) The Tor Project, Inc.
|
||||
# See LICENSE for licensing information
|
||||
# Run this to generate .html.in or .1.in files from asciidoc files.
|
||||
# Arguments:
|
||||
# html|man asciidocpath outputfile
|
||||
|
||||
set -e
|
||||
|
||||
if [ $# != 3 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
SOURCE_DATE_EPOCH="$(git -C "$(dirname "$0")" show --no-patch --format='%ct')"
|
||||
export SOURCE_DATE_EPOCH
|
||||
|
||||
output=$3
|
||||
|
||||
if [ "$1" = "html" ]; then
|
||||
input=${output%%.html.in}.1.txt
|
||||
base=${output%%.html.in}
|
||||
|
||||
if [ "$2" != none ]; then
|
||||
TZ=UTC "$2" -f "$(dirname "$0")/nofooter.conf" -d manpage -o "$output" "$input";
|
||||
else
|
||||
echo "=================================="
|
||||
echo
|
||||
echo "You need asciidoc installed to be able to build the manpage."
|
||||
echo "To build without manpages, use the --disable-asciidoc argument"
|
||||
echo "when calling configure."
|
||||
echo
|
||||
echo "=================================="
|
||||
exit 1
|
||||
fi
|
||||
elif [ "$1" = "man" ]; then
|
||||
input=${output%%.1.in}.1.txt
|
||||
base=${output%%.1.in}
|
||||
|
||||
if test "$2" = none; then
|
||||
echo "=================================="
|
||||
echo
|
||||
echo "You need asciidoc installed to be able to build the manpage."
|
||||
echo "To build without manpages, use the --disable-asciidoc argument"
|
||||
echo "when calling configure."
|
||||
echo
|
||||
echo "=================================="
|
||||
exit 1
|
||||
fi
|
||||
if "$2" -f manpage "$input"; then
|
||||
mv "$base.1" "$output"
|
||||
else
|
||||
cat<<EOF
|
||||
==================================
|
||||
You need a working asciidoc installed to be able to build the manpage.
|
||||
|
||||
a2x is installed, but for some reason it isn't working. Sometimes
|
||||
this happens because required docbook support files are missing.
|
||||
Please install docbook-xsl, docbook-xml, and xmlto (Debian) or
|
||||
similar. If you use homebrew on Mac OS X, install the docbook formula
|
||||
and add "export XML_CATALOG_FILES=/usr/local/etc/xml/catalog" to your
|
||||
.bashrc file.
|
||||
|
||||
Alternatively, to build without manpages, use the --disable-asciidoc
|
||||
argument when calling configure.
|
||||
==================================
|
||||
EOF
|
||||
exit 1
|
||||
fi
|
||||
fi
|
@ -1,56 +0,0 @@
|
||||
## Instructions for building the official rpms.
|
||||
##
|
||||
The process used to create the official rpms is as follows:
|
||||
|
||||
You'll need to install the libevent headers, usually located in a package named
|
||||
libevent-devel. Alternatively, you could download the latest libevent from
|
||||
https://libevent.org/, but that shouldn't be necessary.
|
||||
|
||||
Download and Extract the latest tor source code from
|
||||
https://www.torproject.org/download
|
||||
|
||||
In the resulting directory:
|
||||
LIBS=-lrt ./configure
|
||||
make dist-rpm
|
||||
|
||||
You should have at least two, maybe three, rpms. There should be the binary
|
||||
(i686|x86_64).rpm, a src.rpm, and on redhat/centos machines, a debuginfo.rpm.
|
||||
The debuginfo rpms are created if the redhat-rpm-config package is installed
|
||||
(as is the case on Red Hat distros).
|
||||
|
||||
This step suffices unless you want to create RPMs for distros other than the
|
||||
one you used for building.
|
||||
|
||||
|
||||
## Instructions for building RPMs for multiple architectures or distributions
|
||||
## using 'mock' on Fedora or RHEL (and clones)
|
||||
|
||||
Make sure you have mock installed and configured, see following HOWTOs for setup:
|
||||
https://fedoraproject.org/wiki/How_to_create_an_RPM_package
|
||||
https://fedoraproject.org/wiki/Using_Mock_to_test_package_builds
|
||||
|
||||
Take the source RPM generated by previous step, and execute mock for every
|
||||
target architecture (the names come from files in /etc/mock, strip the .cfg
|
||||
extension in the -r parameter):
|
||||
|
||||
mock --rebuild -r fedora-17-x86_64 tor-X.Y.Z.src.rpm
|
||||
|
||||
Building for EL5 from a newer distro (e.g. EL6 or Fedora 17) will fail due to a bug
|
||||
(https://bugzilla.redhat.com/show_bug.cgi?id=490613).
|
||||
Here's a workaround:
|
||||
|
||||
Before even building the source RPM, install fedora-packager and instruct
|
||||
the build system to use rpmbuild-md5 like this:
|
||||
|
||||
dnf install fedora-packager
|
||||
export RPMBUILD=rpmbuild-md5
|
||||
|
||||
Then proceed as usual to create the source RPM and binary RPMs:
|
||||
|
||||
LIBS=-lrt ./configure
|
||||
make dist-rpm
|
||||
mock --rebuild -r epel-5-x86_64 tor-X.Y.Z.src.rpm
|
||||
|
||||
|
||||
(Note: don't build under OpenVZ - it breaks the unshare() syscall, which in turn
|
||||
breaks mock. Avoiding it could save you several hours.)
|
112
doc/include.am
112
doc/include.am
@ -1,112 +0,0 @@
|
||||
# We use a two-step process to generate documentation from asciidoc files.
|
||||
#
|
||||
# First, we use asciidoc/a2x to process the asciidoc files into .1.in and
|
||||
# .html.in files (see the asciidoc-helper.sh script). These are the same as
|
||||
# the regular .1 and .html files, except that they still have some autoconf
|
||||
# variables set in them.
|
||||
#
|
||||
# Second, we use config.status to turn .1.in files into .1 files and
|
||||
# .html.in files into .html files.
|
||||
#
|
||||
# We do the steps in this order so that we can ship the .*.in files as
|
||||
# part of the source distribution, so that people without asciidoc can
|
||||
# just use the .1 and .html files.
|
||||
|
||||
all_mans = doc/man/tor doc/man/tor-gencert doc/man/tor-resolve doc/man/torify doc/man/tor-print-ed-signing-cert
|
||||
|
||||
if USE_ASCIIDOC
|
||||
txt_in = $(all_mans:=.1.txt)
|
||||
|
||||
if BUILD_HTML_DOCS
|
||||
html_in = $(all_mans:=.html.in)
|
||||
doc_DATA = $(all_mans:=.html)
|
||||
else
|
||||
html_in =
|
||||
doc_DATA =
|
||||
endif
|
||||
|
||||
if BUILD_MANPAGE
|
||||
nodist_man1_MANS = $(all_mans:=.1)
|
||||
man_in = $(all_mans:=.1.in)
|
||||
else
|
||||
nodist_man1_MANS =
|
||||
man_in =
|
||||
endif
|
||||
|
||||
else
|
||||
|
||||
html_in =
|
||||
doc_DATA =
|
||||
man_in =
|
||||
txt_in =
|
||||
nodist_man1_MANS =
|
||||
|
||||
endif
|
||||
|
||||
EXTRA_DIST+= doc/asciidoc-helper.sh \
|
||||
$(html_in) $(man_in) $(txt_in) \
|
||||
doc/state-contents.txt \
|
||||
doc/torrc_format.txt \
|
||||
doc/tor-doxygen.css \
|
||||
doc/TUNING \
|
||||
doc/HACKING/README.1st.md \
|
||||
doc/HACKING/CodingStandards.md \
|
||||
doc/HACKING/Fuzzing.md \
|
||||
doc/HACKING/GettingStarted.md \
|
||||
doc/HACKING/HelpfulTools.md \
|
||||
doc/HACKING/HowToReview.md \
|
||||
doc/HACKING/Module.md \
|
||||
doc/HACKING/ReleasingTor.md \
|
||||
doc/HACKING/WritingTests.md \
|
||||
doc/HACKING/tracing/EventsCircuit.md \
|
||||
doc/HACKING/tracing/README.md
|
||||
|
||||
docdir = @docdir@
|
||||
|
||||
asciidoc_product = $(nodist_man1_MANS) $(doc_DATA)
|
||||
|
||||
# Generate the html documentation from asciidoc, but don't do
|
||||
# machine-specific replacements yet
|
||||
$(html_in) :
|
||||
$(AM_V_GEN)$(top_srcdir)/doc/asciidoc-helper.sh html @ASCIIDOC@ $(top_srcdir)/$@
|
||||
|
||||
# Generate the manpage from asciidoc, but don't do
|
||||
# machine-specific replacements yet
|
||||
$(man_in) :
|
||||
$(AM_V_GEN)$(top_srcdir)/doc/asciidoc-helper.sh man @A2X@ $(top_srcdir)/$@
|
||||
|
||||
doc/man/tor.1.in: doc/man/tor.1.txt
|
||||
doc/man/torify.1.in: doc/man/torify.1.txt
|
||||
doc/man/tor-gencert.1.in: doc/man/tor-gencert.1.txt
|
||||
doc/man/tor-resolve.1.in: doc/man/tor-resolve.1.txt
|
||||
doc/man/tor-print-ed-signing-cert.1.in: doc/man/tor-print-ed-signing-cert.1.txt
|
||||
|
||||
doc/man/tor.html.in: doc/man/tor.1.txt
|
||||
doc/man/torify.html.in: doc/man/torify.1.txt
|
||||
doc/man/tor-gencert.html.in: doc/man/tor-gencert.1.txt
|
||||
doc/man/tor-resolve.html.in: doc/man/tor-resolve.1.txt
|
||||
doc/man/tor-print-ed-signing-cert.html.in: doc/man/tor-print-ed-signing-cert.1.txt
|
||||
|
||||
# use config.status to swap all machine-specific magic strings
|
||||
# in the asciidoc with their replacements.
|
||||
$(asciidoc_product) :
|
||||
$(AM_V_GEN)$(MKDIR_P) $(@D)
|
||||
$(AM_V_at)if test -e $(top_srcdir)/$@.in && ! test -e $@.in ; then \
|
||||
cp $(top_srcdir)/$@.in $@; \
|
||||
fi
|
||||
$(AM_V_at)$(top_builddir)/config.status -q --file=$@;
|
||||
|
||||
doc/man/tor.html: doc/man/tor.html.in
|
||||
doc/man/tor-gencert.html: doc/man/tor-gencert.html.in
|
||||
doc/man/tor-resolve.html: doc/man/tor-resolve.html.in
|
||||
doc/man/tor-print-ed-signing-cert.html: doc/man/tor-print-ed-signing-cert.html.in
|
||||
doc/man/torify.html: doc/man/torify.html.in
|
||||
|
||||
doc/man/tor.1: doc/man/tor.1.in
|
||||
doc/man/tor-gencert.1: doc/man/tor-gencert.1.in
|
||||
doc/man/tor-resolve.1: doc/man/tor-resolve.1.in
|
||||
doc/man/tor-print-ed-signing-cert.1: doc/man/tor-print-ed-signing-cert.1.in
|
||||
doc/man/torify.1: doc/man/torify.1.in
|
||||
|
||||
CLEANFILES+= $(asciidoc_product)
|
||||
DISTCLEANFILES+= $(html_in) $(man_in)
|
@ -1,88 +0,0 @@
|
||||
// Copyright (c) The Tor Project, Inc.
|
||||
// See LICENSE for licensing information
|
||||
// This is an asciidoc file used to generate the manpage/html reference.
|
||||
// Learn asciidoc on https://www.methods.co.nz/asciidoc/userguide.html
|
||||
:man source: Tor
|
||||
:man manual: Tor Manual
|
||||
tor-gencert(1)
|
||||
==============
|
||||
Tor Project, Inc.
|
||||
|
||||
NAME
|
||||
----
|
||||
tor-gencert - Generate certs and keys for Tor directory authorities
|
||||
|
||||
SYNOPSIS
|
||||
--------
|
||||
**tor-gencert** [-h|--help] [-v] [-r|--reuse] [--create-identity-key] [-i __id_file__] [-c
|
||||
__cert_file__] [-m __num__] [-a __address__:__port__]
|
||||
|
||||
DESCRIPTION
|
||||
-----------
|
||||
**tor-gencert** generates certificates and private keys for use by Tor
|
||||
directory authorities running the v3 Tor directory protocol, as used by
|
||||
Tor 0.2.0 and later. If you are not running a directory authority, you
|
||||
don't need to use tor-gencert. +
|
||||
|
||||
Every directory authority has a long term authority __identity__ __key__ (which
|
||||
is distinct from the identity key it uses as a Tor server); this key
|
||||
should be kept offline in a secure location. It is used to certify
|
||||
shorter-lived __signing__ __keys__, which are kept online and used by the
|
||||
directory authority to sign votes and consensus documents. +
|
||||
|
||||
After you use this program to generate a signing key and a certificate,
|
||||
copy those files to the keys subdirectory of your Tor process, and send
|
||||
Tor a SIGHUP signal. DO NOT COPY THE IDENTITY KEY.
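
For example (illustrative values only), a run such as
"tor-gencert --create-identity-key -m 12 -a 192.0.2.1:80" would create a new
identity key if none exists yet, along with a signing key and a certificate
valid for 12 months that advertises 192.0.2.1:80 as the authority's preferred
directory port; see the OPTIONS below for the individual flags.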
|
||||
|
||||
OPTIONS
|
||||
-------
|
||||
**-v**::
|
||||
Display verbose output.
|
||||
|
||||
**-h** or **--help**::
|
||||
Display help text and exit.
|
||||
|
||||
**-r** or **--reuse**::
|
||||
Generate a new certificate, but not a new signing key. This can be used to
|
||||
change the address or lifetime associated with a given key.
|
||||
|
||||
**--create-identity-key**::
|
||||
Generate a new identity key. You should only use this option the first time
|
||||
you run tor-gencert; in the future, you should use the identity key that's
|
||||
already there.
|
||||
|
||||
**-i** __FILENAME__::
|
||||
Read the identity key from the specified file. If the file is not present
|
||||
and --create-identity-key is provided, create the identity key in the
|
||||
specified file. Default: "./authority_identity_key"
|
||||
|
||||
**-s** __FILENAME__::
|
||||
Write the signing key to the specified file. Default:
|
||||
"./authority_signing_key"
|
||||
|
||||
**-c** __FILENAME__::
|
||||
Write the certificate to the specified file. Default:
|
||||
"./authority_certificate"
|
||||
|
||||
**-m** __NUM__::
|
||||
Number of months that the certificate should be valid. Default: 12.
|
||||
|
||||
**--passphrase-fd** __FILEDES__::
|
||||
Filedescriptor to read the passphrase from. Ends at the first NUL or
|
||||
newline. Default: read from the terminal.
|
||||
|
||||
**-a** __address__:__port__::
|
||||
If provided, advertise the address:port combination as this authority's
|
||||
preferred directory port in its certificate. If the address is a hostname,
|
||||
the hostname is resolved to an IP before it's published.
|
||||
|
||||
BUGS
|
||||
----
|
||||
This probably doesn't run on Windows. That's not a big issue, since we don't
|
||||
really want authorities to be running on Windows anyway.
|
||||
|
||||
SEE ALSO
|
||||
--------
|
||||
**tor**(1) +
|
||||
|
||||
See also the "dir-spec.txt" file, distributed with Tor.
|
@ -1,34 +0,0 @@
|
||||
// Copyright (c) The Tor Project, Inc.
|
||||
// See LICENSE for licensing information
|
||||
// This is an asciidoc file used to generate the manpage/html reference.
|
||||
// Learn asciidoc on https://www.methods.co.nz/asciidoc/userguide.html
|
||||
:man source: Tor
|
||||
:man manual: Tor Manual
|
||||
tor-print-ed-signing-cert(1)
|
||||
============================
|
||||
Tor Project, Inc.
|
||||
|
||||
NAME
|
||||
----
|
||||
tor-print-ed-signing-cert - print expiration date of ed25519 signing certificate
|
||||
|
||||
SYNOPSIS
|
||||
--------
|
||||
**tor-print-ed-signing-cert** __<path to ed25519_signing_cert file>__
|
||||
|
||||
DESCRIPTION
|
||||
-----------
|
||||
**tor-print-ed-signing-cert** is a utility program for Tor relay operators to
|
||||
check the expiration date of an ed25519 signing certificate.
|
||||
|
||||
The expiration date is printed in three formats:
|
||||
|
||||
* Human-readable timestamp, localized to the current timezone.
|
||||
* RFC 1123 timestamp (at GMT timezone).
|
||||
* Unix time value.
|
||||
|
||||
SEE ALSO
|
||||
--------
|
||||
**tor**(1) +
|
||||
|
||||
https://spec.torproject.org/cert-spec
|
@ -1,50 +0,0 @@
|
||||
// Copyright (c) The Tor Project, Inc.
|
||||
// See LICENSE for licensing information
|
||||
// This is an asciidoc file used to generate the manpage/html reference.
|
||||
// Learn asciidoc on https://www.methods.co.nz/asciidoc/userguide.html
|
||||
:man source: Tor
|
||||
:man manual: Tor Manual
|
||||
tor-resolve(1)
|
||||
==============
|
||||
Peter Palfrader
|
||||
|
||||
NAME
|
||||
----
|
||||
tor-resolve - resolve a hostname to an IP address via tor
|
||||
|
||||
SYNOPSIS
|
||||
--------
|
||||
**tor-resolve** [-4|-5] [-v] [-x] [-p __socksport__] __hostname__ [__sockshost__[:__socksport__]]
|
||||
|
||||
DESCRIPTION
|
||||
-----------
|
||||
**tor-resolve** is a simple script to connect to a SOCKS proxy that knows about
|
||||
the SOCKS RESOLVE command, hand it a hostname, and return an IP address.
|
||||
|
||||
By default, **tor-resolve** uses the Tor server running on 127.0.0.1 on SOCKS
|
||||
port 9050. If this isn't what you want, you should specify an explicit
|
||||
__sockshost__ and/or __socksport__ on the command line.
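
For example (hypothetical invocations), "tor-resolve www.torproject.org" asks
the local Tor client listening on 127.0.0.1:9050 to resolve the name, while
"tor-resolve -x 198.51.100.1 127.0.0.1:9150" performs a reverse lookup through
a SOCKS proxy on port 9150 instead.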
|
||||
|
||||
OPTIONS
|
||||
-------
|
||||
**-v**::
|
||||
Display verbose output.
|
||||
|
||||
**-x**::
|
||||
Perform a reverse lookup: get the PTR record for an IPv4 address.
|
||||
|
||||
**-5**::
|
||||
Use the SOCKS5 protocol. (Default)
|
||||
|
||||
**-4**::
|
||||
Use the SOCKS4a protocol rather than the default SOCKS5 protocol. Doesn't
|
||||
support reverse DNS.
|
||||
|
||||
**-p** __socksport__::
|
||||
Override the default SOCKS port without setting the hostname.
|
||||
|
||||
SEE ALSO
|
||||
--------
|
||||
**tor**(1), **torify**(1). +
|
||||
|
||||
For protocol details, see: https://spec.torproject.org/socks-extensions
|
4104
doc/man/tor.1.txt
4104
doc/man/tor.1.txt
File diff suppressed because it is too large
@ -1,37 +0,0 @@
|
||||
// Copyright (c) The Tor Project, Inc.
|
||||
// See LICENSE for licensing information
|
||||
// This is an asciidoc file used to generate the manpage/html reference.
|
||||
// Learn asciidoc on https://www.methods.co.nz/asciidoc/userguide.html
|
||||
:man source: Tor
|
||||
:man manual: Tor Manual
|
||||
torify(1)
|
||||
=========
|
||||
Tor Project, Inc.
|
||||
|
||||
NAME
|
||||
----
|
||||
torify - wrapper for torsocks and tor
|
||||
|
||||
SYNOPSIS
|
||||
--------
|
||||
**torify** __application__ [__application's__ __arguments__]
|
||||
|
||||
DESCRIPTION
|
||||
-----------
|
||||
**torify** is a simple wrapper that calls torsocks with a tor-specific
|
||||
configuration file.
|
||||
|
||||
It is provided for backward compatibility; instead you should use torsocks.
|
||||
|
||||
WARNING
|
||||
-------
|
||||
When used with torsocks, torify should not leak DNS requests or UDP data.
|
||||
|
||||
torify can leak ICMP data.
|
||||
|
||||
torify will not ensure that different requests are processed on
|
||||
different circuits.
|
||||
|
||||
SEE ALSO
|
||||
--------
|
||||
**tor**(1), **torsocks**(1)
|
@ -1,3 +0,0 @@
|
||||
# There is a single space on the footer-text line to make it reproducible.
|
||||
[footer-text]
|
||||
|
@ -1,242 +0,0 @@
|
||||
|
||||
Contents of the Tor state file
|
||||
==============================
|
||||
|
||||
The state file is structured with more or less the same rules as torrc.
|
||||
Recognized fields are:
|
||||
|
||||
TorVersion
|
||||
|
||||
The version of Tor that wrote this file
|
||||
|
||||
LastWritten
|
||||
|
||||
Time when this state file was written.
|
||||
Given in ISO format (YYYY-MM-DD HH:MM:SS)
|
||||
|
||||
|
||||
MinutesSinceUserActivity (integer)
|
||||
Dormant (0, 1, or "auto")
|
||||
|
||||
These values are used to keep track of how long Tor has been idle,
|
||||
for the purpose of becoming 'dormant' after a long period without
|
||||
any user-initiated requests.
|
||||
|
||||
"MinutesSinceUserActivity" is the number of minutes since the last
|
||||
time the user asked us to do something. It is set to zero if we're
|
||||
dormant.
|
||||
|
||||
"Dormant" is 1 if Tor was dormant when it wrote its state file, 0 if
|
||||
Tor was active, and "auto" if Tor was starting for the first time.
|
||||
|
||||
AccountingBytesReadInInterval (memory unit)
|
||||
AccountingBytesWrittenInInterval (memory unit)
|
||||
AccountingExpectedUsage (memory unit)
|
||||
AccountingIntervalStart (ISO time)
|
||||
AccountingSecondsActive (time interval)
|
||||
AccountingSecondsToReachSoftLimit (time interval)
|
||||
AccountingSoftLimitHitAt (ISO time)
|
||||
AccountingBytesAtSoftLimit (memory unit)
|
||||
|
||||
These fields describe the state of the accounting subsystem.
|
||||
|
||||
The IntervalStart is the time at which the current accounting
|
||||
interval began. We were expecting to use ExpectedUsage over the
|
||||
course of the interval. BytesRead/BytesWritten are the total
|
||||
number of bytes transferred over the whole interval. If Tor has
|
||||
been active during the interval, then AccountingSecondsActive is
|
||||
the amount of time for which it has been active. We were expecting
|
||||
to hit the bandwidth soft limit in SecondsToReachSoftLimit after we
|
||||
became active. When we hit the soft limit, we record
|
||||
BytesAtSoftLimit. If we hit the soft limit already, we did so at
|
||||
SoftLimitHitAt.
|
||||
|
||||
TransportProxy
|
||||
|
||||
One or more of these may be present.
|
||||
|
||||
The format is "transportname addr:port", to remember the address
|
||||
at which a pluggable transport was listening. Tor bridges use
|
||||
this information to spawn pluggable transport listeners in the
|
||||
same IP address and TCP port even after tor client restarts.
|
||||
|
||||
BWHistory___Ends (ISO time)
|
||||
BWHistory___Interval (integer, number of seconds)
|
||||
BWHistory___Values (comma-separated list of integer)
|
||||
BWHistory___Maxima (comma-separated list of integer)
|
||||
|
||||
These values record bandwidth history. The "Values" fields are a list,
|
||||
for some number of "Intervals", of the total amount read/written during
|
||||
that interval. The "Maxima" are the highest burst for each interval.
|
||||
|
||||
Interval duration is set by the "Interval" field, in seconds. The
|
||||
"Ends" field is the ending time of the last interval in each list.
|
||||
|
||||
Recognized values for "___" are:
|
||||
Read -- total bytes read
|
||||
Write -- total bytes written
|
||||
DirRead -- total bytes read for directory connections.
|
||||
DirWrite -- total bytes written for directory connections.
|
||||
IPv6Read -- total bytes read on IPv6 connections
|
||||
IPv6Write -- total bytes written on IPv6 connections
|
||||
|
||||
LastRotatedOnionKey
|
||||
|
||||
The last time that we changed our onion key for a new one.
|
||||
Given in ISO format (YYYY-MM-DD HH:MM:SS)
|
||||
|
||||
This field is used to ensure that onion key rotations happen with the
|
||||
appropriate frequency.
|
||||
|
||||
TotalBuildTimes
|
||||
CircuitBuildAbandonedCount
|
||||
CircuitBuildTimeBin
|
||||
|
||||
These fields are used by the Circuit Build Timeout code, which
|
||||
tries to learn what times are reasonable for circuit construction,
|
||||
so that it can reject circuits that take too long to build.
|
||||
|
||||
CircuitBuildTimeBin is a count of circuits that were built
|
||||
successfully in some timeframe. This entry can repeat; each of
|
||||
these represents some bar on a histogram. The first integer is a
|
||||
number of milliseconds; it tells the position of the center of the
|
||||
histogram bin on the time axis. The second number is a count of
|
||||
circuits in that bin.
|
||||
|
||||
CircuitBuildAbandonedCount is a count of circuits that we
|
||||
simply gave up on building because they were taking far too long.
|
||||
|
||||
TotalBuildTimes is the number of circuit build times that we
|
||||
observed in order to build the above measurements fields. If it
|
||||
reaches a cap, then older measurements get thrown away.
|
||||
|
||||
Guard [key=value] [key=value]...
|
||||
|
||||
Describes a single entry guard used by the client. Key=value
|
||||
entries with unrecognized keys are persisted. Order is not
|
||||
significant. For more information about the terminology used here,
|
||||
see guard-spec.txt in the tor specifications repository.
|
||||
|
||||
Recognized keys are:
|
||||
|
||||
in (string)
|
||||
|
||||
The name of a guard selection that this guard is in.
|
||||
|
||||
rsa_id (string)
|
||||
|
||||
RSA fingerprint of this guard, without spaces.
|
||||
|
||||
nickname (string)
|
||||
|
||||
Declared nickname of this guard.
|
||||
|
||||
sampled_on (Time in ISO YYYY-MM-DDTHH:MM:SS format)
|
||||
|
||||
When was this guard added to the Guard sample?
|
||||
|
||||
sampled_by (tor version)
|
||||
|
||||
Which version of Tor added this Guard to the sample?
|
||||
(Used to help with debugging.)
|
||||
|
||||
sampled_idx (integer)
|
||||
|
||||
Index of this guard among sampled guards.
|
||||
|
||||
listed (boolean)
|
||||
|
||||
Did this guard appear in the most recent consensus?
|
||||
|
||||
unlisted_since (Time in ISO YYYY-MM-DDTHH:MM:SS format)
|
||||
|
||||
If this guard is not listed, when is the earliest
|
||||
consensus in which we found it unlisted?
|
||||
|
||||
confirmed_on (Time in ISO YYYY-MM-DDTHH:MM:SS format)
|
||||
|
||||
When did this guard become confirmed?
|
||||
|
||||
confirmed_idx (integer)
|
||||
|
||||
Index of this guard among confirmed guards.
|
||||
|
||||
bridge_addr (address)
|
||||
|
||||
If this guard is a bridge, its current address.
|
||||
|
||||
pb_use_attempts
|
||||
pb_use_successes
|
||||
pb_circ_attempts
|
||||
pb_successful_circuits_closed
|
||||
pb_collapsed_circuits
|
||||
pb_unusable_circuits
|
||||
pb_timeouts
|
||||
|
||||
Used by the pathbias subsystem to keep a record of the
|
||||
behavior of circuits built through this guard, in hopes of
|
||||
detecting guards that try to interfere with traffic.
|
||||
|
||||
All of these fields are floating-point numbers which
|
||||
represent a count of circuits that have been treated in
|
||||
various ways. These counts decay with time.
|
||||
|
||||
"use_attempts" is a count of the circuits that we've built
|
||||
and tried to use for traffic.
|
||||
|
||||
"successful_circuits_closed" is a count of circuits that
|
||||
have closed "naturally" without timeout or error.
|
||||
|
||||
"use_successes" is a count of circuits that we've sent
|
||||
traffic on, and which closed "naturally" without timeout
|
||||
or error.
|
||||
|
||||
"circ_attempts" is a count of circuits we've tried to
|
||||
build through this guard.
|
||||
|
||||
"collapsed_circuits" is a count of circuits that failed
|
||||
after having been built, but before sending traffic.
|
||||
|
||||
"unusable_circuits" is a count of circuits that we
|
||||
built, but where streams or probes failed,
|
||||
or which encountered questionable errors.
|
||||
|
||||
"timeouts" is a count of circuits that encountered a
|
||||
timeout while we were building them.
|
||||
|
||||
Obsolete fields include:
|
||||
|
||||
EntryGuard
|
||||
EntryGuardDownSince
|
||||
EntryGuardUnlistedSince
|
||||
EntryGuardAddedBy
|
||||
|
||||
These lines formed sections related to entry guards. Each section
|
||||
starts with a single EntryGuard line, and is then followed by
|
||||
information on the state of the Entry guard.
|
||||
|
||||
The EntryGuard line contains a nickname, then an identity digest, of
|
||||
the guard.
|
||||
|
||||
The EntryGuardDownSince and EntryGuardUnlistedSince lines are present
|
||||
if the entry guard is believed to be non-running or non-listed. If
|
||||
present, they contain a time in ISO format (YYYY-MM-DD HH:MM:SS).
|
||||
|
||||
The EntryGuardAddedBy line is optional. It contains three
|
||||
space-separated fields: the identity of the entry guard, the version of
|
||||
Tor that added it, and the ISO time at which it was added.
|
||||
|
||||
EntryGuardPathBias and EntryGuardPathUseBias are superseded by
|
||||
the `pb_...` elements in the Guard flag, and served a similar purpose.
|
||||
|
||||
These entries have all been superseded by the Guard line type,
|
||||
since Tor 0.3.0.1-alpha.
|
||||
|
||||
HidServRevCounter
|
||||
|
||||
It was once used to ensure that v3 onion service directory revision
|
||||
numbers were strictly increasing; we now use an order-preserving
|
||||
encryption scheme for that purpose.
|
||||
|
||||
This option could appear multiple times; each time it does, it
|
||||
applies to a different hidden service.
|
@ -1,3 +0,0 @@
|
||||
o Documentation:
|
||||
- Update doc/state-contents.txt to more accurately explain the
|
||||
current contents of a Tor state file. Closes ticket 40136.
|
@ -1,10 +0,0 @@
|
||||
|
||||
p.definition {
|
||||
font-size: small;
|
||||
padding-left: 1.5em;
|
||||
}
|
||||
|
||||
p.reference {
|
||||
font-size: small;
|
||||
padding-left: 1.5em;
|
||||
}
|
@ -1,212 +0,0 @@
|
||||
|
||||
This document specifies the current format and semantics of the torrc
|
||||
file, as of July 2015. Note that we make no guarantee about the
|
||||
stability of this format. If you write something designed for strict
|
||||
compatibility with this document, please expect us to break it sooner or
|
||||
later.
|
||||
|
||||
Yes, some of this is quite stupid. My goal here is to explain what it
|
||||
does, not what it should do.
|
||||
|
||||
- Nick
|
||||
|
||||
|
||||
|
||||
1. File Syntax
|
||||
|
||||
; The syntax here is defined in Augmented Backus-Naur form, as
|
||||
; specified in RFC5234.
|
||||
|
||||
; A file is interpreted as every Entry in the file, in order.
|
||||
TorrcFile = *Line [ UnterminatedLine ]
|
||||
|
||||
Line = BlankLine LF / Entry LF
|
||||
UnterminatedLine = BlankLine / Entry
|
||||
|
||||
BlankLine = *WSP OptComment LF
|
||||
BlankLine =/ *WSP LF
|
||||
|
||||
OptComment = [ Comment ]
|
||||
|
||||
Comment = "#" *NonLF
|
||||
|
||||
; Each Entry is interpreted as an optional "Magic" flag, a key, and a
|
||||
; value.
|
||||
Entry = *WSP [ Magic ] Key 1*(1*WSP / "\" NL *WSP) Val LF
|
||||
Entry =/ *WSP [ Magic ] Key *( *WSP / "\" NL *WSP) LF
|
||||
|
||||
Magic = "+" / "/"
|
||||
|
||||
; Keys are always specified verbatim. They are case insensitive. It
|
||||
; is an error to specify a key that Tor does not recognize.
|
||||
Key = 1*KC
|
||||
|
||||
; Sadly, every kind of value is decoded differently...
|
||||
Val = QuotedVal / ContinuedVal / PlainVal
|
||||
|
||||
; The text of a PlainVal is the text of its PVBody portion,
|
||||
; plus the optional trailing backslash.
|
||||
PlainVal = PVBody [ "\" ] *WSP OptComment
|
||||
|
||||
; Note that a PVBody is copied verbatim. Slashes are included
|
||||
; verbatim. No changes are made. Note that a body may be empty.
|
||||
PVBody = * (VC / "\" NonLF )
|
||||
|
||||
; The text of a ContinuedVal is the text of each of its PVBody
|
||||
; sub-elements, in order, concatenated.
|
||||
ContinuedVal = CVal1 *CVal2 CVal3
|
||||
|
||||
CVal1 = PVBody "\" LF
|
||||
CVal2 = PVBody ( "\" LF / Comment LF )
|
||||
CVal3 = PVBody
|
||||
|
||||
; The text of a QuotedVal is decoded as if it were a C string.
|
||||
QuotedVal = DQ QVBody DQ *WSP Comment
|
||||
|
||||
QVBody = QC
|
||||
QVBody =/ "\" ( "n" / "r" / "t" / "\" / "'" / DQUOTE )
|
||||
QVBody =/ "\" ( "x" 2HEXDIG / 1*3OCTDIG )
|
||||
|
||||
; Anything besides NUL and LF
|
||||
NonLF = %x01-%x09 / %x0b - %xff
|
||||
|
||||
; Note that on windows, we open our configuration files in "text" mode,
|
||||
; which causes CRLF pairs to be interpreted as LF. So, on windows:
|
||||
; LF = [ %x0d ] %x0a
|
||||
; but everywhere else,
|
||||
LF = %x0a
|
||||
|
||||
OCTDIG = '0' - '7'
|
||||
|
||||
KC = Any character except an isspace() character or '#' or NUL
|
||||
VC = Any character except '\\', '\n', '#', or NUL
|
||||
QC = Any character except '\n', '\\', '\"', or NUL
|
||||
|
||||
2. Mid-level Semantics
|
||||
|
||||
|
||||
There are four configuration "domains", from lowest to highest priority:
|
||||
|
||||
* Built-in defaults
|
||||
* The "torrc_defaults" file, if any
|
||||
* The "torrc" file, if any
|
||||
* Arguments provided on the command line, if any.
|
||||
|
||||
Normally, values from high-priority domains override low-priority
|
||||
domains, but see 'magic' below.
|
||||
|
||||
Configuration keys fall into three categories: singletons, lists, and
|
||||
groups.
|
||||
|
||||
A singleton key may appear at most once in any domain. Its
|
||||
corresponding value is equal to its value in the highest-priority
|
||||
domain in which it occurs.
|
||||
|
||||
A list key may appear any number of times in a domain. By default,
|
||||
its corresponding value is equal to all of the values specified for
|
||||
it in the highest-priority domain in which it appears. (See 'magic'
|
||||
below).
|
||||
|
||||
A group key may appear any number of times in a domain. It is
|
||||
associated with a number of other keys in the same group. The
|
||||
relative positions of entries with the keys in a single group
|
||||
matters, but entries with keys not in the group may be freely
|
||||
interspersed. By default, the group has a value equal to all keys
|
||||
and values it contains, from the highest-priority domain in which any
|
||||
of its keys occurs.
|
||||
|
||||
Magic:
|
||||
|
||||
If the '/' flag is specified for an entry, it sets the value for
|
||||
that entry to an empty list. (This will cause a higher-priority
|
||||
domain to clear a list from a lower-priority domain, without
|
||||
actually adding any entries.)
|
||||
|
||||
If the '+' flag is specified for the first entry in a list or a
|
||||
group that appears in a given domain, that list or group is
|
||||
appended to the list or group from the next-lowest-priority
|
||||
domain, rather than replacing it.
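
For illustration (SocksPort is used here only as an example of a list
option; see also the examples in section 4): if the "torrc_defaults" file
contains "SocksPort 9050", then a torrc line "+SocksPort 9150" results in
both ports being configured, while a torrc line "/SocksPort" clears the
SocksPort entries inherited from the lower-priority domains.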
|
||||
|
||||
3. High-level semantics
|
||||
|
||||
There are further constraints on the values that each entry can take.
|
||||
These constraints are out-of-scope for this document.
|
||||
|
||||
4. Examples
|
||||
|
||||
(Indentation is removed in this section, to avoid confusion.)
|
||||
|
||||
4.1. Syntax examples
|
||||
|
||||
# Here is a simple configuration entry. The key is "Foo"; the value is
|
||||
# "Bar"
|
||||
|
||||
Foo Bar
|
||||
|
||||
# A configuration entry can have spaces in its value, as below. Here the
|
||||
# key is "Foo" and the value is "Bar Baz"
|
||||
Foo Bar Baz
|
||||
|
||||
# This configuration entry has space at the end of the line, but those
|
||||
# spaces don't count, so the key and value are still "Foo" and "Bar Baz"
|
||||
Foo Bar Baz
|
||||
|
||||
# There can be an escaped newline between the value and the key. This
|
||||
# is another way to say key="Hello", value="World"
|
||||
Hello\
|
||||
World
|
||||
|
||||
# In regular entries of this kind, you can have a comment at the end of
|
||||
# the line, either with a space before it or not. Each of these is a
|
||||
# different spelling of key="Hello", value="World"
|
||||
|
||||
Hello World #today
|
||||
Hello World#tomorrow
|
||||
|
||||
# One way to encode a complex entry is as a C string. This is the same
|
||||
# as key="Hello", value="World!"
|
||||
Hello "World!"
|
||||
|
||||
# The string can contain the usual set of C escapes. This entry has
|
||||
# key="Hello", and value="\"World\"\nand\nuniverse"
|
||||
Hello "\"World\"\nand\nuniverse"
|
||||
|
||||
# And now we get to the more-or-less awful part.
|
||||
#
|
||||
# Multi-line entries ending with a backslash on each line aren't so
|
||||
# bad. The backslash is removed, and everything else is included
|
||||
# verbatim. So this entry has key="Hello" and value="Worldandfriends"
|
||||
Hello\
|
||||
World\
|
||||
and\
|
||||
friends
|
||||
|
||||
# Backslashes in the middle of a line are included as-is. The key of
|
||||
# this one is "Too" and the value is "Many\\Backsl\ashes \here" (with
|
||||
# backslashes in that last string as-is)
|
||||
Too \
|
||||
Many\\\
|
||||
Backsl\ashes \\
|
||||
here
|
||||
|
||||
# And here's the really yucky part. If a comment appears in a multi-line
|
||||
# entry, the entry is still able to continue on the next line, as in the
|
||||
# following, where the key is "This" and the value is
|
||||
# "entry and some are silly"
|
||||
This entry \
|
||||
# has comments \
|
||||
and some \
|
||||
are # generally \
|
||||
silly
|
||||
|
||||
# But you can also write that without the backslashes at the end of the
|
||||
# comment lines. That is to say, this entry is exactly the same as the
|
||||
# one above!
|
||||
This entry \
|
||||
# has comments
|
||||
and some \
|
||||
are # generally
|
||||
silly
|
||||
|
||||
|
||||
|
@ -1,51 +0,0 @@
|
||||
# Copyright (c) 2008 Guido U. Draheim <guidod@gmx.de>
|
||||
# Copyright (c) 2011 Maarten Bosmans <mkbosmans@gmail.com>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation, either version 3 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
||||
# Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
# As a special exception, the respective Autoconf Macro's copyright owner
|
||||
# gives unlimited permission to copy, distribute and modify the configure
|
||||
# scripts that are the output of Autoconf when processing the Macro. You
|
||||
# need not follow the terms of the GNU General Public License when using
|
||||
# or distributing such scripts, even though portions of the text of the
|
||||
# Macro appear in them. The GNU General Public License (GPL) does govern
|
||||
# all other use of the material that constitutes the Autoconf Macro.
|
||||
#
|
||||
# This special exception to the GPL applies to versions of the Autoconf
|
||||
# Macro released by the Autoconf Archive. When you make and distribute a
|
||||
# modified version of the Autoconf Macro, you may extend this special
|
||||
# exception to the GPL to apply to your modified version as well.
|
||||
# Copying and distribution of this file, with or without modification, are
|
||||
# permitted in any medium without royalty provided the copyright notice
|
||||
# and this notice are preserved. This file is offered as-is, without any
|
||||
# warranty.
|
||||
|
||||
#serial 5
|
||||
#serial 6
|
||||
|
||||
AC_DEFUN([AX_CHECK_COMPILE_FLAG],
|
||||
[AC_PREREQ(2.64)dnl for _AC_LANG_PREFIX and AS_VAR_IF
|
||||
AS_VAR_PUSHDEF([CACHEVAR],[ax_cv_check_[]_AC_LANG_ABBREV[]flags_$4_$1])dnl
|
||||
AC_CACHE_CHECK([whether _AC_LANG compiler accepts $1], CACHEVAR, [
|
||||
ax_check_save_flags=$[]_AC_LANG_PREFIX[]FLAGS
|
||||
_AC_LANG_PREFIX[]FLAGS="$[]_AC_LANG_PREFIX[]FLAGS $4 $1"
|
||||
AC_COMPILE_IFELSE([m4_default([$5],[AC_LANG_PROGRAM()])],
|
||||
[AS_VAR_SET(CACHEVAR,[yes])],
|
||||
[AS_VAR_SET(CACHEVAR,[no])])
|
||||
_AC_LANG_PREFIX[]FLAGS=$ax_check_save_flags])
|
||||
AS_VAR_IF(CACHEVAR,yes,
|
||||
[m4_default([$2], :)],
|
||||
[m4_default([$3], :)])
|
||||
AS_VAR_POPDEF([CACHEVAR])dnl
|
||||
])dnl AX_CHECK_COMPILE_FLAGS
|
@ -1,54 +0,0 @@
|
||||
# ===========================================================================
|
||||
# https://www.gnu.org/software/autoconf-archive/ax_check_sign.html
|
||||
# ===========================================================================
|
||||
#
|
||||
# SYNOPSIS
|
||||
#
|
||||
# AX_CHECK_SIGN (TYPE, [ACTION-IF-SIGNED], [ACTION-IF-UNSIGNED], [INCLUDES])
|
||||
#
|
||||
# DESCRIPTION
|
||||
#
|
||||
# Checks whether TYPE is signed or not. If no INCLUDES are specified, the
|
||||
# default includes are used. If ACTION-IF-SIGNED is given, it is
|
||||
# additional shell code to execute when the type is signed. If
|
||||
# ACTION-IF-UNSIGNED is given, it is executed when the type is unsigned.
|
||||
#
|
||||
# This macro assumes that the type exists. Therefore the existence of the
|
||||
# type should be checked before calling this macro. For example:
|
||||
#
|
||||
# AC_CHECK_HEADERS([wchar.h])
|
||||
# AC_CHECK_TYPE([wchar_t],,[ AC_MSG_ERROR([Type wchar_t not found.]) ])
|
||||
# AX_CHECK_SIGN([wchar_t],
|
||||
# [ AC_DEFINE(WCHAR_T_SIGNED, 1, [Define if wchar_t is signed]) ],
|
||||
# [ AC_DEFINE(WCHAR_T_UNSIGNED, 1, [Define if wchar_t is unsigned]) ], [
|
||||
# #ifdef HAVE_WCHAR_H
|
||||
# #include <wchar.h>
|
||||
# #endif
|
||||
# ])
|
||||
#
|
||||
# LICENSE
|
||||
#
|
||||
# Copyright (c) 2008 Ville Laurikari <vl@iki.fi>
|
||||
#
|
||||
# Copying and distribution of this file, with or without modification, are
|
||||
# permitted in any medium without royalty provided the copyright notice
|
||||
# and this notice are preserved. This file is offered as-is, without any
|
||||
# warranty.
|
||||
|
||||
#serial 6
|
||||
|
||||
AU_ALIAS([VL_CHECK_SIGN], [AX_CHECK_SIGN])
|
||||
AC_DEFUN([AX_CHECK_SIGN], [
|
||||
typename=`echo $1 | sed "s/@<:@^a-zA-Z0-9_@:>@/_/g"`
|
||||
AC_CACHE_CHECK([whether $1 is signed], ax_cv_decl_${typename}_signed, [
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[$4]],
|
||||
[[ int foo @<:@ 1 - 2 * !((($1) -1) < 0) @:>@ ]])],
|
||||
[ eval "ax_cv_decl_${typename}_signed=\"yes\"" ],
|
||||
[ eval "ax_cv_decl_${typename}_signed=\"no\"" ])])
|
||||
symbolname=`echo $1 | sed "s/@<:@^a-zA-Z0-9_@:>@/_/g" | tr "a-z" "A-Z"`
|
||||
if eval "test \"\${ax_cv_decl_${typename}_signed}\" = \"yes\""; then
|
||||
$2
|
||||
elif eval "test \"\${ax_cv_decl_${typename}_signed}\" = \"no\""; then
|
||||
$3
|
||||
fi
|
||||
])dnl
|
@ -1,88 +0,0 @@
|
||||
# ===========================================================================
|
||||
# https://www.gnu.org/software/autoconf-archive/ax_compiler_vendor.html
|
||||
# ===========================================================================
|
||||
#
|
||||
# SYNOPSIS
|
||||
#
|
||||
# AX_COMPILER_VENDOR
|
||||
#
|
||||
# DESCRIPTION
|
||||
#
|
||||
# Determine the vendor of the C/C++ compiler, e.g., gnu, intel, ibm, sun,
|
||||
# hp, borland, comeau, dec, cray, kai, lcc, metrowerks, sgi, microsoft,
|
||||
# watcom, etc. The vendor is returned in the cache variable
|
||||
# $ax_cv_c_compiler_vendor for C and $ax_cv_cxx_compiler_vendor for C++.
|
||||
#
|
||||
# LICENSE
|
||||
#
|
||||
# Copyright (c) 2008 Steven G. Johnson <stevenj@alum.mit.edu>
|
||||
# Copyright (c) 2008 Matteo Frigo
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation, either version 3 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
||||
# Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
# As a special exception, the respective Autoconf Macro's copyright owner
|
||||
# gives unlimited permission to copy, distribute and modify the configure
|
||||
# scripts that are the output of Autoconf when processing the Macro. You
|
||||
# need not follow the terms of the GNU General Public License when using
|
||||
# or distributing such scripts, even though portions of the text of the
|
||||
# Macro appear in them. The GNU General Public License (GPL) does govern
|
||||
# all other use of the material that constitutes the Autoconf Macro.
|
||||
#
|
||||
# This special exception to the GPL applies to versions of the Autoconf
|
||||
# Macro released by the Autoconf Archive. When you make and distribute a
|
||||
# modified version of the Autoconf Macro, you may extend this special
|
||||
# exception to the GPL to apply to your modified version as well.
|
||||
|
||||
#serial 17
|
||||
|
||||
AC_DEFUN([AX_COMPILER_VENDOR],
|
||||
[AC_CACHE_CHECK([for _AC_LANG compiler vendor], ax_cv_[]_AC_LANG_ABBREV[]_compiler_vendor,
|
||||
dnl Please add if possible support to ax_compiler_version.m4
|
||||
[# note: don't check for gcc first since some other compilers define __GNUC__
|
||||
vendors="intel: __ICC,__ECC,__INTEL_COMPILER
|
||||
ibm: __xlc__,__xlC__,__IBMC__,__IBMCPP__
|
||||
pathscale: __PATHCC__,__PATHSCALE__
|
||||
clang: __clang__
|
||||
cray: _CRAYC
|
||||
fujitsu: __FUJITSU
|
||||
sdcc: SDCC, __SDCC
|
||||
gnu: __GNUC__
|
||||
sun: __SUNPRO_C,__SUNPRO_CC
|
||||
hp: __HP_cc,__HP_aCC
|
||||
dec: __DECC,__DECCXX,__DECC_VER,__DECCXX_VER
|
||||
borland: __BORLANDC__,__CODEGEARC__,__TURBOC__
|
||||
comeau: __COMO__
|
||||
kai: __KCC
|
||||
lcc: __LCC__
|
||||
sgi: __sgi,sgi
|
||||
microsoft: _MSC_VER
|
||||
metrowerks: __MWERKS__
|
||||
watcom: __WATCOMC__
|
||||
portland: __PGI
|
||||
tcc: __TINYC__
|
||||
unknown: UNKNOWN"
|
||||
for ventest in $vendors; do
|
||||
case $ventest in
|
||||
*:) vendor=$ventest; continue ;;
|
||||
*) vencpp="defined("`echo $ventest | sed 's/,/) || defined(/g'`")" ;;
|
||||
esac
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM(,[
|
||||
#if !($vencpp)
|
||||
thisisanerror;
|
||||
#endif
|
||||
])], [break])
|
||||
done
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_vendor=`echo $vendor | cut -d: -f1`
|
||||
])
|
||||
])
|
@ -1,529 +0,0 @@
|
||||
# ===========================================================================
|
||||
# https://www.gnu.org/software/autoconf-archive/ax_compiler_version.html
|
||||
# ===========================================================================
|
||||
#
|
||||
# SYNOPSIS
|
||||
#
|
||||
# AX_COMPILER_VERSION
|
||||
#
|
||||
# DESCRIPTION
|
||||
#
|
||||
# This macro retrieves the compiler version and returns it in the cache
|
||||
# variable $ax_cv_c_compiler_version for C and $ax_cv_cxx_compiler_version
|
||||
# for C++.
|
||||
#
|
||||
# Version is returned as epoch:major.minor.patchversion
|
||||
#
|
||||
# Epoch is used in order to have an increasing version number in case of
|
||||
# marketing change.
|
||||
#
|
||||
# Epoch use: * borland compiler use chronologically 0turboc for turboc
|
||||
# era,
|
||||
#
|
||||
# 1borlanc BORLANDC++ before 5, 2cppbuilder for cppbuilder era,
|
||||
# 3borlancpp for return of BORLANDC++ (after version 5.5),
|
||||
# 4cppbuilder for cppbuilder with year version,
|
||||
# and 5xe for XE era.
|
||||
#
|
||||
# An empty string is returned otherwise.
|
||||
#
|
||||
# LICENSE
|
||||
#
|
||||
# Copyright (c) 2014 Bastien ROUCARIES <roucaries.bastien+autoconf@gmail.com>
|
||||
#
|
||||
# Copying and distribution of this file, with or without modification, are
|
||||
# permitted in any medium without royalty provided the copyright notice
|
||||
# and this notice are preserved. This file is offered as-is, without any
|
||||
# warranty.
|
||||
|
||||
#serial 12
|
||||
|
||||
# for intel
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_INTEL],
|
||||
[ dnl
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
[__INTEL_COMPILER/100],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown intel compiler version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
[(__INTEL_COMPILER%100)/10],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown intel compiler version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
[(__INTEL_COMPILER%10)],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown intel compiler version]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
|
||||
# for IBM
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_IBM],
|
||||
[ dnl
|
||||
dnl check between z/OS C/C++ and XL C/C++
|
||||
AC_COMPILE_IFELSE([
|
||||
AC_LANG_PROGRAM([],
|
||||
[
|
||||
#if defined(__COMPILER_VER__)
|
||||
choke me;
|
||||
#endif
|
||||
])],
|
||||
[
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
[__xlC__/100],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown IBM compiler major version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
[__xlC__%100],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown IBM compiler minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
[__xlC_ver__/0x100],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown IBM compiler patch version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_build,
|
||||
[__xlC_ver__%0x100],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown IBM compiler build version]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_build"
|
||||
],
|
||||
[
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
[__xlC__%1000],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown IBM compiler patch version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
[(__xlC__/10000)%10],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown IBM compiler minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
[(__xlC__/100000)%10],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown IBM compiler major version]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
])
|
||||
|
||||
# for pathscale
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_PATHSCALE],[
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
__PATHCC__,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown pathscale major]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
__PATHCC_MINOR__,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown pathscale minor]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
[__PATHCC_PATCHLEVEL__],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown pathscale patch level]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
|
||||
# for clang
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_CLANG],[
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
__clang_major__,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown clang major]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
__clang_minor__,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown clang minor]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
[__clang_patchlevel__],,0)
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
|
||||
# for crayc
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_CRAY],[
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
_RELEASE,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown crayc release]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
_RELEASE_MINOR,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown crayc minor]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor"
|
||||
])
|
||||
|
||||
# for fujitsu
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_FUJITSU],[
|
||||
AC_COMPUTE_INT(ax_cv_[]_AC_LANG_ABBREV[]_compiler_version,
|
||||
__FCC_VERSION,,
|
||||
AC_MSG_FAILURE([[[$0]]unknown fujitsu release]))
|
||||
])
|
||||
|
||||
# for GNU
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_GNU],[
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
__GNUC__,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown gcc major]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
__GNUC_MINOR__,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown gcc minor]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
[__GNUC_PATCHLEVEL__],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown gcc patch level]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
|
||||
# For sun
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_SUN],[
|
||||
m4_define([_AX_COMPILER_VERSION_SUN_NUMBER],
|
||||
[
|
||||
#if defined(__SUNPRO_CC)
|
||||
__SUNPRO_CC
|
||||
#else
|
||||
__SUNPRO_C
|
||||
#endif
|
||||
])
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_until59,
|
||||
!!(_AX_COMPILER_VERSION_SUN_NUMBER < 0x1000),,
|
||||
AC_MSG_FAILURE([[[$0]] unknown sun release version]))
|
||||
AS_IF([test "X$_ax_[]_AC_LANG_ABBREV[]_compiler_version_until59" = X1],
|
||||
[dnl
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
_AX_COMPILER_VERSION_SUN_NUMBER % 0x10,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown sun patch version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
(_AX_COMPILER_VERSION_SUN_NUMBER / 0x10) % 0x10,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown sun minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
(_AX_COMPILER_VERSION_SUN_NUMBER / 0x100),,
|
||||
AC_MSG_FAILURE([[[$0]] unknown sun major version]))
|
||||
],
|
||||
[dnl
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
_AX_COMPILER_VERSION_SUN_NUMBER % 0x10,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown sun patch version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
(_AX_COMPILER_VERSION_SUN_NUMBER / 0x100) % 0x100,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown sun minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
(_AX_COMPILER_VERSION_SUN_NUMBER / 0x1000),,
|
||||
AC_MSG_FAILURE([[[$0]] unknown sun major version]))
|
||||
])
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_HP],[
|
||||
m4_define([_AX_COMPILER_VERSION_HP_NUMBER],
|
||||
[
|
||||
#if defined(__HP_cc)
|
||||
__HP_cc
|
||||
#else
|
||||
__HP_aCC
|
||||
#endif
|
||||
])
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_untilA0121,
|
||||
!!(_AX_COMPILER_VERSION_HP_NUMBER <= 1),,
|
||||
AC_MSG_FAILURE([[[$0]] unknown hp release version]))
|
||||
AS_IF([test "X$_ax_[]_AC_LANG_ABBREV[]_compiler_version_untilA0121" = X1],
|
||||
[dnl By default, output the last version known to have this behavior;
|
||||
dnl the compiler is too old to report anything more precise.
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="01.21.00"
|
||||
],
|
||||
[dnl
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
(_AX_COMPILER_VERSION_HP_NUMBER % 100),,
|
||||
AC_MSG_FAILURE([[[$0]] unknown hp release version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
((_AX_COMPILER_VERSION_HP_NUMBER / 100)%100),,
|
||||
AC_MSG_FAILURE([[[$0]] unknown hp minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
((_AX_COMPILER_VERSION_HP_NUMBER / 10000)%100),,
|
||||
AC_MSG_FAILURE([[[$0]] unknown hp major version]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
])
|
||||
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_DEC],[dnl
|
||||
m4_define([_AX_COMPILER_VERSION_DEC_NUMBER],
|
||||
[
|
||||
#if defined(__DECC_VER)
|
||||
__DECC_VER
|
||||
#else
|
||||
__DECCXX_VER
|
||||
#endif
|
||||
])
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
(_AX_COMPILER_VERSION_DEC_NUMBER % 10000),,
|
||||
AC_MSG_FAILURE([[[$0]] unknown dec release version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
((_AX_COMPILER_VERSION_DEC_NUMBER / 100000UL)%100),,
|
||||
AC_MSG_FAILURE([[[$0]] unknown dec minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
((_AX_COMPILER_VERSION_DEC_NUMBER / 10000000UL)%100),,
|
||||
AC_MSG_FAILURE([[[$0]] unknown dec major version]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
|
||||
# borland
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_BORLAND],[dnl
|
||||
m4_define([_AX_COMPILER_VERSION_TURBOC_NUMBER],
|
||||
[
|
||||
#if defined(__TURBOC__)
|
||||
__TURBOC__
|
||||
#else
|
||||
choke me
|
||||
#endif
|
||||
])
|
||||
m4_define([_AX_COMPILER_VERSION_BORLANDC_NUMBER],
|
||||
[
|
||||
#if defined(__BORLANDC__)
|
||||
__BORLANDC__
|
||||
#else
|
||||
__CODEGEARC__
|
||||
#endif
|
||||
])
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_PROGRAM(,
|
||||
_AX_COMPILER_VERSION_TURBOC_NUMBER)],
|
||||
[dnl TURBOC
|
||||
AC_COMPUTE_INT(
|
||||
_ax_[]_AC_LANG_ABBREV[]_compiler_version_turboc_raw,
|
||||
_AX_COMPILER_VERSION_TURBOC_NUMBER,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown turboc version]))
|
||||
AS_IF(
|
||||
[test $_ax_[]_AC_LANG_ABBREV[]_compiler_version_turboc_raw -lt 661 || test $_ax_[]_AC_LANG_ABBREV[]_compiler_version_turboc_raw -gt 1023],
|
||||
[dnl compute normal version
|
||||
AC_COMPUTE_INT(
|
||||
_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
_AX_COMPILER_VERSION_TURBOC_NUMBER % 0x100,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown turboc minor version]))
|
||||
AC_COMPUTE_INT(
|
||||
_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
(_AX_COMPILER_VERSION_TURBOC_NUMBER/0x100)%0x100,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown turboc major version]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="0turboc:$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor"],
|
||||
[dnl special version
|
||||
AS_CASE([$_ax_[]_AC_LANG_ABBREV[]_compiler_version_turboc_raw],
|
||||
[661],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="0turboc:1.00"],
|
||||
[662],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="0turboc:1.01"],
|
||||
[663],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="0turboc:2.00"],
|
||||
[
|
||||
AC_MSG_WARN([[[$0]] unknown turboc version between 0x295 and 0x400; please report this bug])
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version=""
|
||||
])
|
||||
])
|
||||
],
|
||||
# borlandc
|
||||
[
|
||||
AC_COMPUTE_INT(
|
||||
_ax_[]_AC_LANG_ABBREV[]_compiler_version_borlandc_raw,
|
||||
_AX_COMPILER_VERSION_BORLANDC_NUMBER,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown borlandc version]))
|
||||
AS_CASE([$_ax_[]_AC_LANG_ABBREV[]_compiler_version_borlandc_raw],
|
||||
dnl BORLANDC++ before 5.5
|
||||
[512],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="1borlanc:2.00"],
|
||||
[1024],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="1borlanc:3.00"],
|
||||
[1040],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="1borlanc:3.1"],
|
||||
[1106],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="1borlanc:4.0"],
|
||||
[1280],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="1borlanc:5.0"],
|
||||
[1312],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="1borlanc:5.02"],
|
||||
dnl C++ Builder era
|
||||
[1328],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="2cppbuilder:3.0"],
|
||||
[1344],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="2cppbuilder:4.0"],
|
||||
dnl BORLANDC++ after 5.5
|
||||
[1360],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="3borlancpp:5.5"],
|
||||
[1361],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="3borlancpp:5.51"],
|
||||
[1378],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="3borlancpp:5.6.4"],
|
||||
dnl C++ Builder with year number
|
||||
[1392],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="4cppbuilder:2006"],
|
||||
[1424],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="4cppbuilder:2007"],
|
||||
[1555],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="4cppbuilder:2009"],
|
||||
[1569],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="4cppbuilder:2010"],
|
||||
dnl XE version
|
||||
[1584],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="5xe"],
|
||||
[1600],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="5xe:2"],
|
||||
[1616],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="5xe:3"],
|
||||
[1632],[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="5xe:4"],
|
||||
[
|
||||
AC_MSG_WARN([[[$0]] Unknown borlandc compiler version $_ax_[]_AC_LANG_ABBREV[]_compiler_version_borlandc_raw; please report this bug])
|
||||
])
|
||||
])
|
||||
])
|
||||
|
||||
# COMO
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_COMEAU],
|
||||
[ dnl
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
[__COMO_VERSION__%100],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown comeau compiler minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
[(__COMO_VERSION__/100)%10],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown comeau compiler major version]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor"
|
||||
])
|
||||
|
||||
# KAI
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_KAI],[
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
[__KCC_VERSION%100],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown kai compiler patch version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
[(__KCC_VERSION/100)%10],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown kai compiler minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
[(__KCC_VERSION/1000)%10],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown kai compiler major version]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
|
||||
dnl LCC
|
||||
dnl LCC does not output version...
|
||||
|
||||
# SGI
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_SGI],[
|
||||
m4_define([_AX_COMPILER_VERSION_SGI_NUMBER],
|
||||
[
|
||||
#if defined(_COMPILER_VERSION)
|
||||
_COMPILER_VERSION
|
||||
#else
|
||||
_SGI_COMPILER_VERSION
|
||||
#endif
|
||||
])
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
[_AX_COMPILER_VERSION_SGI_NUMBER%10],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown SGI compiler patch version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
[(_AX_COMPILER_VERSION_SGI_NUMBER/10)%10],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown SGI compiler minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
[(_AX_COMPILER_VERSION_SGI_NUMBER/100)%10],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown SGI compiler major version]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
|
||||
# microsoft
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_MICROSOFT],[
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
_MSC_VER%100,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown microsoft compiler minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
(_MSC_VER/100)%100,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown microsoft compiler major version]))
|
||||
dnl could be overridden
|
||||
_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch=0
|
||||
_ax_[]_AC_LANG_ABBREV[]_compiler_version_build=0
|
||||
# special case for version 6
|
||||
AS_IF([test "X$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major" = "X12"],
|
||||
[AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
_MSC_FULL_VER%1000,,
|
||||
_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch=0)])
|
||||
# for version 7
|
||||
AS_IF([test "X$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major" = "X13"],
|
||||
[AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
_MSC_FULL_VER%1000,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown microsoft compiler patch version]))
|
||||
])
|
||||
# for version 8 and later
|
||||
AS_IF([test $_ax_[]_AC_LANG_ABBREV[]_compiler_version_major -ge 14],
|
||||
[AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
_MSC_FULL_VER%10000,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown microsoft compiler patch version]))
|
||||
])
|
||||
AS_IF([test $_ax_[]_AC_LANG_ABBREV[]_compiler_version_major -ge 15],
|
||||
[AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_build,
|
||||
_MSC_BUILD,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown microsoft compiler build version]))
|
||||
])
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_build"
|
||||
])
|
||||
|
||||
# for metrowerks
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_METROWERKS],[dnl
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
__MWERKS__%0x100,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown metrowerks compiler patch version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
(__MWERKS__/0x100)%0x10,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown metrowerks compiler minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
(__MWERKS__/0x1000)%0x10,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown metrowerks compiler major version]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
|
||||
# for watcom
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_WATCOM],[dnl
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
__WATCOMC__%100,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown watcom compiler minor version]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
(__WATCOMC__/100)%100,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown watcom compiler major version]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor"
|
||||
])
|
||||
|
||||
# for PGI
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_PORTLAND],[
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
__PGIC__,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown pgi major]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
__PGIC_MINOR__,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown pgi minor]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
[__PGIC_PATCHLEVEL__],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown pgi patch level]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
|
||||
# tcc
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_TCC],[
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version=[`tcc -v | $SED 's/^[ ]*tcc[ ]\+version[ ]\+\([0-9.]\+\).*/\1/g'`]
|
||||
])
|
||||
|
||||
# for SDCC
|
||||
AC_DEFUN([_AX_COMPILER_VERSION_SDCC],[
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_major,
|
||||
/* avoid parse error with comments */
|
||||
#if(defined(__SDCC_VERSION_MAJOR))
|
||||
__SDCC_VERSION_MAJOR
|
||||
#else
|
||||
SDCC/100
|
||||
#endif
|
||||
,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown sdcc major]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor,
|
||||
/* avoid parse error with comments */
|
||||
#if(defined(__SDCC_VERSION_MINOR))
|
||||
__SDCC_VERSION_MINOR
|
||||
#else
|
||||
(SDCC%100)/10
|
||||
#endif
|
||||
,,
|
||||
AC_MSG_FAILURE([[[$0]] unknown sdcc minor]))
|
||||
AC_COMPUTE_INT(_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch,
|
||||
[
|
||||
/* avoid parse error with comments */
|
||||
#if(defined(__SDCC_VERSION_PATCH))
|
||||
__SDCC_VERSION_PATCH
|
||||
#elif(defined(__SDCC_VERSION_PATCHLEVEL))
|
||||
__SDCC_VERSION_PATCHLEVEL
|
||||
#else
|
||||
SDCC%10
|
||||
#endif
|
||||
],,
|
||||
AC_MSG_FAILURE([[[$0]] unknown sdcc patch level]))
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version="$_ax_[]_AC_LANG_ABBREV[]_compiler_version_major.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_minor.$_ax_[]_AC_LANG_ABBREV[]_compiler_version_patch"
|
||||
])
|
||||
|
||||
# main entry point
|
||||
AC_DEFUN([AX_COMPILER_VERSION],[dnl
|
||||
AC_REQUIRE([AX_COMPILER_VENDOR])
|
||||
AC_REQUIRE([AC_PROG_SED])
|
||||
AC_CACHE_CHECK([for _AC_LANG compiler version],
|
||||
ax_cv_[]_AC_LANG_ABBREV[]_compiler_version,
|
||||
[ dnl
|
||||
AS_CASE([$ax_cv_[]_AC_LANG_ABBREV[]_compiler_vendor],
|
||||
[intel],[_AX_COMPILER_VERSION_INTEL],
|
||||
[ibm],[_AX_COMPILER_VERSION_IBM],
|
||||
[pathscale],[_AX_COMPILER_VERSION_PATHSCALE],
|
||||
[clang],[_AX_COMPILER_VERSION_CLANG],
|
||||
[cray],[_AX_COMPILER_VERSION_CRAY],
|
||||
[fujitsu],[_AX_COMPILER_VERSION_FUJITSU],
|
||||
[gnu],[_AX_COMPILER_VERSION_GNU],
|
||||
[sun],[_AX_COMPILER_VERSION_SUN],
|
||||
[hp],[_AX_COMPILER_VERSION_HP],
|
||||
[dec],[_AX_COMPILER_VERSION_DEC],
|
||||
[borland],[_AX_COMPILER_VERSION_BORLAND],
|
||||
[comeau],[_AX_COMPILER_VERSION_COMEAU],
|
||||
[kai],[_AX_COMPILER_VERSION_KAI],
|
||||
[sgi],[_AX_COMPILER_VERSION_SGI],
|
||||
[microsoft],[_AX_COMPILER_VERSION_MICROSOFT],
|
||||
[metrowerks],[_AX_COMPILER_VERSION_METROWERKS],
|
||||
[watcom],[_AX_COMPILER_VERSION_WATCOM],
|
||||
[portland],[_AX_COMPILER_VERSION_PORTLAND],
|
||||
[tcc],[_AX_COMPILER_VERSION_TCC],
|
||||
[sdcc],[_AX_COMPILER_VERSION_SDCC],
|
||||
[ax_cv_[]_AC_LANG_ABBREV[]_compiler_version=""])
|
||||
])
|
||||
])
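# Illustrative usage only (not part of the original macro; the message text
# is a placeholder): a configure.ac could request and report the detected
# version roughly like this, using the cache variable set above for C:
#
#   AX_COMPILER_VERSION
#   AC_MSG_NOTICE([C compiler version: $ax_cv_c_compiler_version])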
|
@ -1,128 +0,0 @@
|
||||
# This file is from Google Performance Tools, svn revision r226.
|
||||
#
|
||||
# The Google Performance Tools license is:
|
||||
########
|
||||
# Copyright (c) 2005, Google Inc.
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
########
|
||||
# Original file follows below.
|
||||
|
||||
# We want to access the "PC" (Program Counter) register from a struct
|
||||
# ucontext. Every system has its own way of doing that. We try all the
|
||||
# possibilities we know about. Note REG_PC should come first (REG_RIP
|
||||
# is also defined on solaris, but does the wrong thing).
|
||||
|
||||
# OpenBSD doesn't have ucontext.h, but we can get PC from ucontext_t
|
||||
# by using signal.h.
|
||||
|
||||
# The first argument of AC_PC_FROM_UCONTEXT will be invoked when we
|
||||
# cannot find a way to obtain PC from ucontext.
|
||||
|
||||
AC_DEFUN([AC_PC_FROM_UCONTEXT],
|
||||
[AC_CHECK_HEADERS(ucontext.h)
|
||||
# Redhat 7 has <sys/ucontext.h>, but it barfs if we #include it directly
|
||||
# (this was fixed in later redhats). <ucontext.h> works fine, so use that.
|
||||
if grep "Red Hat Linux release 7" /etc/redhat-release >/dev/null 2>&1; then
|
||||
AC_DEFINE(HAVE_SYS_UCONTEXT_H, 0, [<sys/ucontext.h> is broken on redhat 7])
|
||||
ac_cv_header_sys_ucontext_h=no
|
||||
else
|
||||
AC_CHECK_HEADERS(sys/ucontext.h) # ucontext on OS X 10.6 (at least)
|
||||
fi
|
||||
AC_CHECK_HEADERS(cygwin/signal.h) # ucontext on cygwin
|
||||
AC_MSG_CHECKING([how to access the program counter from a struct ucontext])
|
||||
pc_fields=" uc_mcontext.gregs[[REG_PC]]" # Solaris x86 (32 + 64 bit)
|
||||
pc_fields="$pc_fields uc_mcontext.gregs[[REG_EIP]]" # Linux (i386)
|
||||
pc_fields="$pc_fields uc_mcontext.gregs[[REG_RIP]]" # Linux (x86_64)
|
||||
pc_fields="$pc_fields uc_mcontext.sc_ip" # Linux (ia64)
|
||||
pc_fields="$pc_fields uc_mcontext.uc_regs->gregs[[PT_NIP]]" # Linux (ppc)
|
||||
pc_fields="$pc_fields uc_mcontext.gregs[[R15]]" # Linux (arm old [untested])
|
||||
pc_fields="$pc_fields uc_mcontext.arm_pc" # Linux (arm arch 5)
|
||||
pc_fields="$pc_fields uc_mcontext.gp_regs[[PT_NIP]]" # Suse SLES 11 (ppc64)
|
||||
pc_fields="$pc_fields uc_mcontext.mc_eip" # FreeBSD (i386)
|
||||
pc_fields="$pc_fields uc_mcontext.mc_rip" # FreeBSD (x86_64 [untested])
|
||||
pc_fields="$pc_fields uc_mcontext.__gregs[[_REG_EIP]]" # NetBSD (i386)
|
||||
pc_fields="$pc_fields uc_mcontext.__gregs[[_REG_RIP]]" # NetBSD (x86_64)
|
||||
pc_fields="$pc_fields uc_mcontext->ss.eip" # OS X (i386, <=10.4)
|
||||
pc_fields="$pc_fields uc_mcontext->__ss.__eip" # OS X (i386, >=10.5)
|
||||
pc_fields="$pc_fields uc_mcontext->ss.rip" # OS X (x86_64)
|
||||
pc_fields="$pc_fields uc_mcontext->__ss.__rip" # OS X (>=10.5 [untested])
|
||||
pc_fields="$pc_fields uc_mcontext->ss.srr0" # OS X (ppc, ppc64 [untested])
|
||||
pc_fields="$pc_fields uc_mcontext->__ss.__srr0" # OS X (>=10.5 [untested])
|
||||
pc_field_found=false
|
||||
for pc_field in $pc_fields; do
|
||||
if ! $pc_field_found; then
|
||||
# Prefer sys/ucontext.h to ucontext.h, for OS X's sake.
|
||||
if test "x$ac_cv_header_cygwin_signal_h" = xyes; then
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <cygwin/signal.h>]],
|
||||
[[ucontext_t u; return u.$pc_field == 0;]])],
|
||||
AC_DEFINE_UNQUOTED(PC_FROM_UCONTEXT, $pc_field,
|
||||
How to access the PC from a struct ucontext)
|
||||
AC_MSG_RESULT([$pc_field])
|
||||
pc_field_found=true)
|
||||
elif test "x$ac_cv_header_sys_ucontext_h" = xyes; then
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/ucontext.h>]],
|
||||
[[ucontext_t u; return u.$pc_field == 0;]])],
|
||||
AC_DEFINE_UNQUOTED(PC_FROM_UCONTEXT, $pc_field,
|
||||
How to access the PC from a struct ucontext)
|
||||
AC_MSG_RESULT([$pc_field])
|
||||
pc_field_found=true)
|
||||
elif test "x$ac_cv_header_ucontext_h" = xyes; then
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <ucontext.h>]],
|
||||
[[ucontext_t u; return u.$pc_field == 0;]])],
|
||||
AC_DEFINE_UNQUOTED(PC_FROM_UCONTEXT, $pc_field,
|
||||
How to access the PC from a struct ucontext)
|
||||
AC_MSG_RESULT([$pc_field])
|
||||
pc_field_found=true)
|
||||
else # hope some standard header gives it to us
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]],
|
||||
[[ucontext_t u; return u.$pc_field == 0;]])],
|
||||
AC_DEFINE_UNQUOTED(PC_FROM_UCONTEXT, $pc_field,
|
||||
How to access the PC from a struct ucontext)
|
||||
AC_MSG_RESULT([$pc_field])
|
||||
pc_field_found=true)
|
||||
fi
|
||||
fi
|
||||
done
|
||||
if ! $pc_field_found; then
|
||||
pc_fields=" sc_eip" # OpenBSD (i386)
|
||||
pc_fields="$pc_fields sc_rip" # OpenBSD (x86_64)
|
||||
for pc_field in $pc_fields; do
|
||||
if ! $pc_field_found; then
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <signal.h>]],
|
||||
[[ucontext_t u; return u.$pc_field == 0;]])],
|
||||
AC_DEFINE_UNQUOTED(PC_FROM_UCONTEXT, $pc_field,
|
||||
How to access the PC from a struct ucontext)
|
||||
AC_MSG_RESULT([$pc_field])
|
||||
pc_field_found=true)
|
||||
fi
|
||||
done
|
||||
fi
|
||||
if ! $pc_field_found; then
|
||||
[$1]
|
||||
fi])
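# Illustrative usage only (not from the original file): a configure.ac might
# invoke the macro roughly like this, warning instead of failing when no
# program-counter field can be found:
#
#   AC_PC_FROM_UCONTEXT([AC_MSG_WARN([cannot find the PC in a struct ucontext])])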
|
214
m4/pkg.m4
214
m4/pkg.m4
@ -1,214 +0,0 @@
|
||||
# pkg.m4 - Macros to locate and utilise pkg-config. -*- Autoconf -*-
|
||||
# serial 1 (pkg-config-0.24)
|
||||
#
|
||||
# Copyright © 2004 Scott James Remnant <scott@netsplit.com>.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# As a special exception to the GNU General Public License, if you
|
||||
# distribute this file as part of a program that contains a
|
||||
# configuration script generated by Autoconf, you may include it under
|
||||
# the same distribution terms that you use for the rest of that program.
|
||||
|
||||
# PKG_PROG_PKG_CONFIG([MIN-VERSION])
|
||||
# ----------------------------------
|
||||
AC_DEFUN([PKG_PROG_PKG_CONFIG],
|
||||
[m4_pattern_forbid([^_?PKG_[A-Z_]+$])
|
||||
m4_pattern_allow([^PKG_CONFIG(_(PATH|LIBDIR|SYSROOT_DIR|ALLOW_SYSTEM_(CFLAGS|LIBS)))?$])
|
||||
m4_pattern_allow([^PKG_CONFIG_(DISABLE_UNINSTALLED|TOP_BUILD_DIR|DEBUG_SPEW)$])
|
||||
AC_ARG_VAR([PKG_CONFIG], [path to pkg-config utility])
|
||||
AC_ARG_VAR([PKG_CONFIG_PATH], [directories to add to pkg-config's search path])
|
||||
AC_ARG_VAR([PKG_CONFIG_LIBDIR], [path overriding pkg-config's built-in search path])
|
||||
|
||||
if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then
|
||||
AC_PATH_TOOL([PKG_CONFIG], [pkg-config])
|
||||
fi
|
||||
if test -n "$PKG_CONFIG"; then
|
||||
_pkg_min_version=m4_default([$1], [0.9.0])
|
||||
AC_MSG_CHECKING([pkg-config is at least version $_pkg_min_version])
|
||||
if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then
|
||||
AC_MSG_RESULT([yes])
|
||||
else
|
||||
AC_MSG_RESULT([no])
|
||||
PKG_CONFIG=""
|
||||
fi
|
||||
fi[]dnl
|
||||
])# PKG_PROG_PKG_CONFIG
|
||||
|
||||
# PKG_CHECK_EXISTS(MODULES, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
|
||||
#
|
||||
# Check to see whether a particular set of modules exists. Similar
|
||||
# to PKG_CHECK_MODULES(), but does not set variables or print errors.
|
||||
#
|
||||
# Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG])
|
||||
# only at the first occurrence in configure.ac, so if the first place
|
||||
# it's called might be skipped (such as if it is within an "if"), you
|
||||
# have to call PKG_CHECK_EXISTS manually.
|
||||
# --------------------------------------------------------------
|
||||
AC_DEFUN([PKG_CHECK_EXISTS],
|
||||
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
|
||||
if test -n "$PKG_CONFIG" && \
|
||||
AC_RUN_LOG([$PKG_CONFIG --exists --print-errors "$1"]); then
|
||||
m4_default([$2], [:])
|
||||
m4_ifvaln([$3], [else
|
||||
$3])dnl
|
||||
fi])
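# Illustrative usage only (the module name and actions are placeholders):
# checking whether a module is present without setting any variables:
#
#   PKG_CHECK_EXISTS([zlib >= 1.2],
#                    [AC_MSG_NOTICE([zlib found])],
#                    [AC_MSG_NOTICE([zlib not found])])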
|
||||
|
||||
# _PKG_CONFIG([VARIABLE], [COMMAND], [MODULES])
|
||||
# ---------------------------------------------
|
||||
m4_define([_PKG_CONFIG],
|
||||
[if test -n "$$1"; then
|
||||
pkg_cv_[]$1="$$1"
|
||||
elif test -n "$PKG_CONFIG"; then
|
||||
PKG_CHECK_EXISTS([$3],
|
||||
[pkg_cv_[]$1=`$PKG_CONFIG --[]$2 "$3" 2>/dev/null`
|
||||
test "x$?" != "x0" && pkg_failed=yes ],
|
||||
[pkg_failed=yes])
|
||||
else
|
||||
pkg_failed=untried
|
||||
fi[]dnl
|
||||
])# _PKG_CONFIG
|
||||
|
||||
# _PKG_SHORT_ERRORS_SUPPORTED
|
||||
# -----------------------------
|
||||
AC_DEFUN([_PKG_SHORT_ERRORS_SUPPORTED],
|
||||
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])
|
||||
if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then
|
||||
_pkg_short_errors_supported=yes
|
||||
else
|
||||
_pkg_short_errors_supported=no
|
||||
fi[]dnl
|
||||
])# _PKG_SHORT_ERRORS_SUPPORTED
|
||||
|
||||
|
||||
# PKG_CHECK_MODULES(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND],
|
||||
# [ACTION-IF-NOT-FOUND])
|
||||
#
|
||||
#
|
||||
# Note that if there is a possibility the first call to
|
||||
# PKG_CHECK_MODULES might not happen, you should be sure to include an
|
||||
# explicit call to PKG_PROG_PKG_CONFIG in your configure.ac
|
||||
#
|
||||
#
|
||||
# --------------------------------------------------------------
|
||||
AC_DEFUN([PKG_CHECK_MODULES],
|
||||
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
|
||||
AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl
|
||||
AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl
|
||||
|
||||
pkg_failed=no
|
||||
AC_MSG_CHECKING([for $1])
|
||||
|
||||
_PKG_CONFIG([$1][_CFLAGS], [cflags], [$2])
|
||||
_PKG_CONFIG([$1][_LIBS], [libs], [$2])
|
||||
|
||||
m4_define([_PKG_TEXT], [Alternatively, you may set the environment variables $1[]_CFLAGS
|
||||
and $1[]_LIBS to avoid the need to call pkg-config.
|
||||
See the pkg-config man page for more details.])
|
||||
|
||||
if test $pkg_failed = yes; then
|
||||
AC_MSG_RESULT([no])
|
||||
_PKG_SHORT_ERRORS_SUPPORTED
|
||||
if test $_pkg_short_errors_supported = yes; then
|
||||
$1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1`
|
||||
else
|
||||
$1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1`
|
||||
fi
|
||||
# Put the nasty error message in config.log where it belongs
|
||||
echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD
|
||||
|
||||
m4_default([$4], [AC_MSG_ERROR(
|
||||
[Package requirements ($2) were not met:
|
||||
|
||||
$$1_PKG_ERRORS
|
||||
|
||||
Consider adjusting the PKG_CONFIG_PATH environment variable if you
|
||||
installed software in a non-standard prefix.
|
||||
|
||||
_PKG_TEXT])[]dnl
|
||||
])
|
||||
elif test $pkg_failed = untried; then
|
||||
AC_MSG_RESULT([no])
|
||||
m4_default([$4], [AC_MSG_FAILURE(
|
||||
[The pkg-config script could not be found or is too old. Make sure it
|
||||
is in your PATH or set the PKG_CONFIG environment variable to the full
|
||||
path to pkg-config.
|
||||
|
||||
_PKG_TEXT
|
||||
|
||||
To get pkg-config, see <https://pkg-config.freedesktop.org/>.])[]dnl
|
||||
])
|
||||
else
|
||||
$1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS
|
||||
$1[]_LIBS=$pkg_cv_[]$1[]_LIBS
|
||||
AC_MSG_RESULT([yes])
|
||||
$3
|
||||
fi[]dnl
|
||||
])# PKG_CHECK_MODULES
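# Illustrative usage only (the module name, version, and shell variables are
# placeholders): a configure.ac would normally pair an explicit
# PKG_PROG_PKG_CONFIG call with PKG_CHECK_MODULES roughly like this:
#
#   PKG_PROG_PKG_CONFIG([0.24])
#   PKG_CHECK_MODULES([LIBEVENT], [libevent >= 2.0.10],
#                     [have_libevent=yes], [have_libevent=no])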
|
||||
|
||||
|
||||
# PKG_INSTALLDIR(DIRECTORY)
|
||||
# -------------------------
|
||||
# Substitutes the variable pkgconfigdir as the location where a module
|
||||
# should install pkg-config .pc files. By default the directory is
|
||||
# $libdir/pkgconfig, but the default can be changed by passing
|
||||
# DIRECTORY. The user can override through the --with-pkgconfigdir
|
||||
# parameter.
|
||||
AC_DEFUN([PKG_INSTALLDIR],
|
||||
[m4_pushdef([pkg_default], [m4_default([$1], ['${libdir}/pkgconfig'])])
|
||||
m4_pushdef([pkg_description],
|
||||
[pkg-config installation directory @<:@]pkg_default[@:>@])
|
||||
AC_ARG_WITH([pkgconfigdir],
|
||||
[AS_HELP_STRING([--with-pkgconfigdir], pkg_description)],,
|
||||
[with_pkgconfigdir=]pkg_default)
|
||||
AC_SUBST([pkgconfigdir], [$with_pkgconfigdir])
|
||||
m4_popdef([pkg_default])
|
||||
m4_popdef([pkg_description])
|
||||
]) dnl PKG_INSTALLDIR
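# Illustrative usage only (the .pc file name is a placeholder): accepting the
# default ${libdir}/pkgconfig location and then installing a .pc file there
# from Makefile.am:
#
#   PKG_INSTALLDIR
#   dnl in Makefile.am:  pkgconfig_DATA = libexample.pc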
|
||||
|
||||
|
||||
# PKG_NOARCH_INSTALLDIR(DIRECTORY)
|
||||
# --------------------------------
|
||||
# Substitutes the variable noarch_pkgconfigdir as the location where a
|
||||
# module should install arch-independent pkg-config .pc files. By
|
||||
# default the directory is $datadir/pkgconfig, but the default can be
|
||||
# changed by passing DIRECTORY. The user can override through the
|
||||
# --with-noarch-pkgconfigdir parameter.
|
||||
AC_DEFUN([PKG_NOARCH_INSTALLDIR],
|
||||
[m4_pushdef([pkg_default], [m4_default([$1], ['${datadir}/pkgconfig'])])
|
||||
m4_pushdef([pkg_description],
|
||||
[pkg-config arch-independent installation directory @<:@]pkg_default[@:>@])
|
||||
AC_ARG_WITH([noarch-pkgconfigdir],
|
||||
[AS_HELP_STRING([--with-noarch-pkgconfigdir], pkg_description)],,
|
||||
[with_noarch_pkgconfigdir=]pkg_default)
|
||||
AC_SUBST([noarch_pkgconfigdir], [$with_noarch_pkgconfigdir])
|
||||
m4_popdef([pkg_default])
|
||||
m4_popdef([pkg_description])
|
||||
]) dnl PKG_NOARCH_INSTALLDIR
|
||||
|
||||
|
||||
# PKG_CHECK_VAR(VARIABLE, MODULE, CONFIG-VARIABLE,
|
||||
# [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
|
||||
# -------------------------------------------
|
||||
# Retrieves the value of the pkg-config variable for the given module.
|
||||
AC_DEFUN([PKG_CHECK_VAR],
|
||||
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
|
||||
AC_ARG_VAR([$1], [value of $3 for $2, overriding pkg-config])dnl
|
||||
|
||||
_PKG_CONFIG([$1], [variable="][$3]["], [$2])
|
||||
AS_VAR_COPY([$1], [pkg_cv_][$1])
|
||||
|
||||
AS_VAR_IF([$1], [""], [$5], [$4])dnl
|
||||
])# PKG_CHECK_VAR
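# Illustrative usage only (module and variable names are placeholders):
# reading a single pkg-config variable into an output variable:
#
#   PKG_CHECK_VAR([BASH_COMPLETION_DIR], [bash-completion], [completionsdir],
#                 [], [AC_MSG_WARN([no completionsdir found])])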
|
228
m4/pprint.m4
228
m4/pprint.m4
@ -1,228 +0,0 @@
|
||||
# Pretty printing macros.
|
||||
#
|
||||
# Copyright (C) 2019 - Philippe Proulx <pproulx@efficios.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License, version 2 only, as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# As a special exception to the GNU General Public License, if you distribute
|
||||
# this file as part of a program that contains a configuration script
|
||||
# generated by Autoconf, you may include it under the same distribution terms
|
||||
# that you use for the rest of that program.
|
||||
|
||||
#serial 1
|
||||
|
||||
# PPRINT_INIT(): initializes the pretty printing system.
|
||||
#
|
||||
# Use this macro before using any other PPRINT_* macro.
|
||||
AC_DEFUN([PPRINT_INIT], [
|
||||
m4_define([PPRINT_CONFIG_TS], [50])
|
||||
m4_define([PPRINT_CONFIG_INDENT], [2])
|
||||
PPRINT_YES_MSG=yes
|
||||
PPRINT_NO_MSG=no
|
||||
|
||||
# find tput, which tells us if colors are supported and gives us color codes
|
||||
AC_PATH_PROG([pprint_tput], [tput])
|
||||
|
||||
AS_IF([test -n "$pprint_tput"], [
|
||||
AS_IF([test -n "$PS1" && test `"$pprint_tput" colors` -ge 8 && test -t 1], [
|
||||
# interactive shell and colors supported and standard output
|
||||
# file descriptor is opened on a terminal
|
||||
PPRINT_COLOR_TXTBLK="`"$pprint_tput" setaf 0`"
|
||||
PPRINT_COLOR_TXTBLU="`"$pprint_tput" setaf 4`"
|
||||
PPRINT_COLOR_TXTGRN="`"$pprint_tput" setaf 2`"
|
||||
PPRINT_COLOR_TXTCYN="`"$pprint_tput" setaf 6`"
|
||||
PPRINT_COLOR_TXTRED="`"$pprint_tput" setaf 1`"
|
||||
PPRINT_COLOR_TXTPUR="`"$pprint_tput" setaf 5`"
|
||||
PPRINT_COLOR_TXTYLW="`"$pprint_tput" setaf 3`"
|
||||
PPRINT_COLOR_TXTWHT="`"$pprint_tput" setaf 7`"
|
||||
PPRINT_COLOR_BLD=`"$pprint_tput" bold`
|
||||
PPRINT_COLOR_BLDBLK="$PPRINT_COLOR_BLD$PPRINT_COLOR_TXTBLK"
|
||||
PPRINT_COLOR_BLDBLU="$PPRINT_COLOR_BLD$PPRINT_COLOR_TXTBLU"
|
||||
PPRINT_COLOR_BLDGRN="$PPRINT_COLOR_BLD$PPRINT_COLOR_TXTGRN"
|
||||
PPRINT_COLOR_BLDCYN="$PPRINT_COLOR_BLD$PPRINT_COLOR_TXTCYN"
|
||||
PPRINT_COLOR_BLDRED="$PPRINT_COLOR_BLD$PPRINT_COLOR_TXTRED"
|
||||
PPRINT_COLOR_BLDPUR="$PPRINT_COLOR_BLD$PPRINT_COLOR_TXTPUR"
|
||||
PPRINT_COLOR_BLDYLW="$PPRINT_COLOR_BLD$PPRINT_COLOR_TXTYLW"
|
||||
PPRINT_COLOR_BLDWHT="$PPRINT_COLOR_BLD$PPRINT_COLOR_TXTWHT"
|
||||
PPRINT_COLOR_RST="`"$pprint_tput" sgr0`"
|
||||
|
||||
# colored yes and no
|
||||
PPRINT_YES_MSG="$PPRINT_COLOR_BLDGRN$PPRINT_YES_MSG$PPRINT_COLOR_RST"
|
||||
PPRINT_NO_MSG="$PPRINT_COLOR_BLDRED$PPRINT_NO_MSG$PPRINT_COLOR_RST"
|
||||
|
||||
# subtitle color
|
||||
PPRINT_COLOR_SUBTITLE="$PPRINT_COLOR_BLDCYN"
|
||||
])
|
||||
])
|
||||
])
|
||||
|
||||
# PPRINT_SET_INDENT(indent): sets the current indentation.
|
||||
#
|
||||
# Use PPRINT_INIT() before using this macro.
|
||||
AC_DEFUN([PPRINT_SET_INDENT], [
|
||||
m4_define([PPRINT_CONFIG_INDENT], [$1])
|
||||
])
|
||||
|
||||
# PPRINT_SET_TS(ts): sets the current tab stop.
|
||||
#
|
||||
# Use PPRINT_INIT() before using this macro.
|
||||
AC_DEFUN([PPRINT_SET_TS], [
|
||||
m4_define([PPRINT_CONFIG_TS], [$1])
|
||||
])
|
||||
|
||||
# PPRINT_SUBTITLE(subtitle): pretty prints a subtitle.
|
||||
#
|
||||
# The subtitle is put as is in a double-quoted shell string so the user
|
||||
# needs to escape ".
|
||||
#
|
||||
# Use PPRINT_INIT() before using this macro.
|
||||
AC_DEFUN([PPRINT_SUBTITLE], [
|
||||
AS_ECHO(["${PPRINT_COLOR_SUBTITLE}$1$PPRINT_COLOR_RST"])
|
||||
])
|
||||
|
||||
AC_DEFUN([_PPRINT_INDENT], [
|
||||
m4_if(PPRINT_CONFIG_INDENT, 0, [
|
||||
], [
|
||||
m4_for([pprint_i], [0], m4_eval(PPRINT_CONFIG_INDENT * 2 - 1), [1], [
|
||||
AS_ECHO_N([" "])
|
||||
])
|
||||
])
|
||||
])
|
||||
|
||||
# PPRINT_PROP_STRING(title, value, title_color?): pretty prints a
|
||||
# string property.
|
||||
#
|
||||
# The title is put as is in a double-quoted shell string so the user
|
||||
# needs to escape ".
|
||||
#
|
||||
# The $PPRINT_CONFIG_INDENT variable must be set to the desired indentation
|
||||
# level.
|
||||
#
|
||||
# Use PPRINT_INIT() before using this macro.
|
||||
AC_DEFUN([PPRINT_PROP_STRING], [
|
||||
m4_pushdef([pprint_title], [$1])
|
||||
m4_pushdef([pprint_value], [$2])
|
||||
m4_pushdef([pprint_title_color], m4_default([$3], []))
|
||||
m4_pushdef([pprint_title_len], m4_len(pprint_title))
|
||||
m4_pushdef([pprint_spaces_cnt], m4_eval(PPRINT_CONFIG_TS - pprint_title_len - (PPRINT_CONFIG_INDENT * 2) - 1))
|
||||
|
||||
m4_if(m4_eval(pprint_spaces_cnt <= 0), [1], [
|
||||
m4_define([pprint_spaces_cnt], [1])
|
||||
])
|
||||
|
||||
m4_pushdef([pprint_spaces], [])
|
||||
|
||||
m4_for([pprint_i], 0, m4_eval(pprint_spaces_cnt - 1), [1], [
|
||||
m4_append([pprint_spaces], [ ])
|
||||
])
|
||||
|
||||
_PPRINT_INDENT
|
||||
|
||||
AS_ECHO_N(["pprint_title_color""pprint_title$PPRINT_COLOR_RST:pprint_spaces"])
|
||||
AS_ECHO(["${PPRINT_COLOR_BLD}pprint_value$PPRINT_COLOR_RST"])
|
||||
|
||||
m4_popdef([pprint_spaces])
|
||||
m4_popdef([pprint_spaces_cnt])
|
||||
m4_popdef([pprint_title_len])
|
||||
m4_popdef([pprint_title_color])
|
||||
m4_popdef([pprint_value])
|
||||
m4_popdef([pprint_title])
|
||||
])
|
||||
|
||||
# PPRINT_PROP_BOOL(title, value, title_color?): pretty prints a boolean
|
||||
# property.
|
||||
#
|
||||
# The title is put as is in a double-quoted shell string so the user
|
||||
# needs to escape ".
|
||||
#
|
||||
# The value is evaluated at shell runtime. Its evaluation must be
|
||||
# 0 (false) or 1 (true).
|
||||
#
|
||||
# Uses the PPRINT_PROP_STRING() with the "yes" or "no" string.
|
||||
#
|
||||
# Use PPRINT_INIT() before using this macro.
|
||||
AC_DEFUN([PPRINT_PROP_BOOL], [
|
||||
m4_pushdef([pprint_title], [$1])
|
||||
m4_pushdef([pprint_value], [$2])
|
||||
|
||||
test pprint_value -eq 0 && pprint_msg="$PPRINT_NO_MSG" || pprint_msg="$PPRINT_YES_MSG"
|
||||
|
||||
m4_if([$#], [3], [
|
||||
PPRINT_PROP_STRING(pprint_title, [$pprint_msg], $3)
|
||||
], [
|
||||
PPRINT_PROP_STRING(pprint_title, [$pprint_msg])
|
||||
])
|
||||
|
||||
m4_popdef([pprint_value])
|
||||
m4_popdef([pprint_title])
|
||||
])
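# Illustrative usage only (titles and shell variables are placeholders; the
# boolean value is assumed to expand to 0 or 1 at shell runtime): a
# configure.ac summary block might use these macros roughly like this:
#
#   PPRINT_INIT
#   PPRINT_SUBTITLE([Optional features])
#   PPRINT_PROP_STRING([Compiler], [$CC])
#   PPRINT_PROP_BOOL([Zstd support], [$have_zstd])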
|
||||
|
||||
# PPRINT_PROP_BOOL_CUSTOM(title, value, no_msg, title_color?): pretty prints a boolean
|
||||
# property.
|
||||
#
|
||||
# The title is put as is in a double-quoted shell string so the user
|
||||
# needs to escape ".
|
||||
#
|
||||
# The value is evaluated at shell runtime. Its evaluation must be
|
||||
# 0 (false) or 1 (true).
|
||||
#
|
||||
# Uses the PPRINT_PROP_STRING() with the "yes" or "no" string.
|
||||
#
|
||||
# Use PPRINT_INIT() before using this macro.
|
||||
AC_DEFUN([PPRINT_PROP_BOOL_CUSTOM], [
|
||||
m4_pushdef([pprint_title], [$1])
|
||||
m4_pushdef([pprint_value], [$2])
|
||||
m4_pushdef([pprint_value_no_msg], [$3])
|
||||
|
||||
test pprint_value -eq 0 && pprint_msg="$PPRINT_NO_MSG (pprint_value_no_msg)" || pprint_msg="$PPRINT_YES_MSG"
|
||||
|
||||
m4_if([$#], [4], [
|
||||
PPRINT_PROP_STRING(pprint_title, [$pprint_msg], $4)
|
||||
], [
|
||||
PPRINT_PROP_STRING(pprint_title, [$pprint_msg])
|
||||
])
|
||||
|
||||
m4_popdef([pprint_value_no_msg])
|
||||
m4_popdef([pprint_value])
|
||||
m4_popdef([pprint_title])
|
||||
])
|
||||
|
||||
# PPRINT_WARN(msg): pretty prints a warning message.
|
||||
#
|
||||
# The message is put as is in a double-quoted shell string so the user
|
||||
# needs to escape ".
|
||||
#
|
||||
# Use PPRINT_INIT() before using this macro.
|
||||
AC_DEFUN([PPRINT_WARN], [
|
||||
m4_pushdef([pprint_msg], [$1])
|
||||
|
||||
_PPRINT_INDENT
|
||||
AS_ECHO(["${PPRINT_COLOR_TXTYLW}WARNING:$PPRINT_COLOR_RST ${PPRINT_COLOR_BLDYLW}pprint_msg$PPRINT_COLOR_RST"])
|
||||
|
||||
m4_popdef([pprint_msg])
|
||||
])
|
||||
|
||||
# PPRINT_ERROR(msg): pretty prints an error message and exits.
|
||||
#
|
||||
# The message is put as is in a double-quoted shell string so the user
|
||||
# needs to escape ".
|
||||
#
|
||||
# Use PPRINT_INIT() before using this macro.
|
||||
AC_DEFUN([PPRINT_ERROR], [
|
||||
m4_pushdef([pprint_msg], [$1])
|
||||
|
||||
AC_MSG_ERROR([${PPRINT_COLOR_BLDRED}pprint_msg$PPRINT_COLOR_RST])
|
||||
|
||||
m4_popdef([pprint_msg])
|
||||
])
|
@ -1,69 +0,0 @@
|
||||
The scripts directory holds tools for use in building, generating, testing,
|
||||
and maintaining the Tor source code. It is mainly for use by developers.
|
||||
|
||||
Code maintenance scripts
|
||||
------------------------
|
||||
|
||||
maint/checkLogs.pl -- Verify that Tor log statements are unique.
|
||||
|
||||
maint/check_config_macros.pl -- Look for autoconf tests whose results are
|
||||
never used.
|
||||
|
||||
maint/checkOptionDocs.pl -- Make sure that Tor options are documented in the
|
||||
manpage, and that the manpage only documents real Tor options.
|
||||
|
||||
maint/checkSpace.pl -- Style checker for the Tor source code. Mainly checks
|
||||
whitespace.
|
||||
|
||||
maint/findMergedChanges.pl -- Find a set of changes/* files that have been
|
||||
merged into an upstream version.
|
||||
|
||||
maint/format_changelog.py -- Flow the changelog into the proper format.
|
||||
|
||||
maint/redox.py -- Find places that should have DOCDOC comments to indicate a
|
||||
need for doxygen comments, and put those comments there.
|
||||
|
||||
maint/update_versions.py -- Update the version number in the .nsi and windows
|
||||
orconfig.h files.
|
||||
|
||||
|
||||
Testing scripts
|
||||
---------------
|
||||
|
||||
test/chutney-git-bisect.sh -- a git bisect run script that bisects using
|
||||
chutney. The script builds tor and tor-gencert, then runs chutney. The script
|
||||
takes optional arguments for out-of-tree builds, and specific chutney network
|
||||
flavours. You should copy this script before using it with git bisect, so that
|
||||
it doesn't change (or disappear) during bisection.
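
An illustrative run (the commit refs and copy location are placeholders)
might look like:

  cp scripts/test/chutney-git-bisect.sh /tmp/bisect.sh
  git bisect start <bad-commit> <good-commit>
  git bisect run /tmp/bisect.sh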
|
||||
|
||||
test/cov-blame -- Mash up the results of gcov with git blame. Mainly useful
|
||||
to find out who has been writing untested code.
|
||||
|
||||
test/cov-diff -- Compare two directories of gcov files to identify changed
|
||||
lines without coverage.
|
||||
|
||||
test/coverage -- Generates a directory full of gcov files. You need to use
|
||||
this script instead of calling gcov directly because of our confusingly named
|
||||
object files.
|
||||
|
||||
test/scan-build.sh -- Example script for invoking clang's scan-build
|
||||
static analysis tools.
|
||||
|
||||
|
||||
Code generation scripts
|
||||
-----------------------
|
||||
|
||||
codegen/gen_linux_syscalls.pl -- Generate a table mapping linux syscall
|
||||
numbers to their names.
|
||||
|
||||
codegen/gen_server_ciphers.py -- Generate a sorted list of TLS ciphersuites
|
||||
for servers to choose from.
|
||||
|
||||
codegen/get_mozilla_ciphers.py -- Generate a list of TLS ciphersuites for
|
||||
clients to use in order to look like Firefox.
|
||||
|
||||
Code transformation scripts
|
||||
---------------------------
|
||||
|
||||
coccinelle/calloc.cocci -- Transform code to replace variants of
|
||||
malloc(a*b) with calloc(a,b)
|
@ -1,40 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
|
||||
TMPDIR="$(mktemp -d -t tor_lib_combining.XXXXXX)"
|
||||
ORIGDIR="$(pwd)"
|
||||
|
||||
trap 'cd "$ORIGDIR" && rm -rf "$TMPDIR"' 0
|
||||
|
||||
abspath() {
|
||||
echo "$(cd "$(dirname "$1")" >/dev/null && pwd)/$(basename "$1")"
|
||||
}
|
||||
|
||||
apple_symdef_fix() {
|
||||
# On modern macOS and iOS we need to remove the "__.SYMDEF" and "__.SYMDEF
|
||||
# SORTED" before we repack the archive.
|
||||
# See: tor#40683.
|
||||
if [ "$(uname -s)" = "Darwin" ] ; then
|
||||
find . -name "__.SYMDEF*" -delete
|
||||
fi
|
||||
}
|
||||
|
||||
TARGET=$(abspath "$1")
|
||||
|
||||
shift
|
||||
|
||||
for input in "$@"; do
|
||||
cd "$ORIGDIR"
|
||||
abs=$(abspath "$input")
|
||||
dir="$TMPDIR"/$(basename "$input" .a)
|
||||
mkdir "$dir"
|
||||
cd "$dir" >/dev/null
|
||||
"${AR:-ar}" x "$abs"
|
||||
done
|
||||
|
||||
cd "$TMPDIR" >/dev/null
|
||||
apple_symdef_fix
|
||||
"${AR:-ar}" "${ARFLAGS:-cru}" library.tmp.a ./*/**
|
||||
"${RANLIB:-ranlib}" library.tmp.a
|
||||
mv -f library.tmp.a "$TARGET"
|
@ -1,491 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
####
|
||||
# DO NOT EDIT THIS FILE IN MASTER. ONLY EDIT IT IN THE OLDEST SUPPORTED
|
||||
# BRANCH, THEN MERGE FORWARD.
|
||||
####
|
||||
|
||||
# This script is used to build Tor for continuous integration. It should
|
||||
# be kept the same for all supported Tor versions.
|
||||
#
|
||||
# It's subject to the regular Tor license; see LICENSE for copying
|
||||
# information.
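#
# Illustrative invocation only (the script path and option values are
# placeholders): a minimal local run that configures, builds, and runs
# "make check" might look like:
#
#   CHECK=yes CHUTNEY=no STEM=no ./scripts/ci/ci-driver.sh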
|
||||
|
||||
set -o errexit
|
||||
set -o nounset
|
||||
|
||||
# Options for this script.
|
||||
DEBUG_CI="${DEBUG_CI:-no}"
|
||||
COLOR_CI="${COLOR_CI:-yes}"
|
||||
|
||||
# Options for which CI system this is.
|
||||
ON_GITLAB="${ON_GITLAB:-yes}"
|
||||
|
||||
# Options for things we usually won't want to skip.
|
||||
RUN_STAGE_CONFIGURE="${RUN_STAGE_CONFIGURE:-yes}"
|
||||
RUN_STAGE_BUILD="${RUN_STAGE_BUILD:-yes}"
|
||||
RUN_STAGE_TEST="${RUN_STAGE_TEST:-yes}"
|
||||
|
||||
# Options for how to build Tor. All should be yes/no.
|
||||
FATAL_WARNINGS="${FATAL_WARNINGS:-yes}"
|
||||
HARDENING="${HARDENING:-no}"
|
||||
COVERAGE="${COVERAGE:-no}"
|
||||
DOXYGEN="${DOXYGEN:-no}"
|
||||
ASCIIDOC="${ASCIIDOC:-no}"
|
||||
TRACING="${TRACING:-no}"
|
||||
ALL_BUGS_ARE_FATAL="${ALL_BUGS_ARE_FATAL:-no}"
|
||||
DISABLE_DIRAUTH="${DISABLE_DIRAUTH:-no}"
|
||||
DISABLE_RELAY="${DISABLE_RELAY:-no}"
|
||||
NSS="${NSS:-no}"
|
||||
GPL="${GPL:-no}"
|
||||
|
||||
# Options for which tests to run. All should be yes/no.
|
||||
CHECK="${CHECK:-yes}"
|
||||
STEM="${STEM:-no}"
|
||||
CHUTNEY="${CHUTNEY:-no}"
|
||||
DISTCHECK="${DISTCHECK:-no}"
|
||||
|
||||
# Options for where the Tor source is.
|
||||
CI_SRCDIR="${CI_SRCDIR:-.}"
|
||||
|
||||
# Options for where to build.
|
||||
CI_BUILDDIR="${CI_BUILDDIR:-./build}"
|
||||
|
||||
# How parallel should we run make?
|
||||
MAKE_J_OPT="${MAKE_J_OPT:--j4}"
|
||||
# Should we stop after make finds an error?
|
||||
MAKE_K_OPT="${MAKE_K_OPT:--k}"
|
||||
|
||||
# What make target should we use for chutney?
|
||||
CHUTNEY_MAKE_TARGET="${CHUTNEY_MAKE_TARGET:-test-network}"
|
||||
|
||||
# Where do we find our additional testing tools?
|
||||
CHUTNEY_PATH="${CHUTNEY_PATH:-}"
|
||||
STEM_PATH="${STEM_PATH:-}"
|
||||
|
||||
#############################################################################
|
||||
# Preliminary functions.
|
||||
|
||||
# Terminal coloring/emphasis stuff.
|
||||
if [[ "${COLOR_CI}" == "yes" ]]; then
|
||||
T_RED=$(tput setaf 1 || true)
|
||||
T_GREEN=$(tput setaf 2 || true)
|
||||
T_YELLOW=$(tput setaf 3 || true)
|
||||
T_DIM=$(tput dim || true)
|
||||
T_BOLD=$(tput bold || true)
|
||||
T_RESET=$(tput sgr0 || true)
|
||||
else
|
||||
T_RED=
|
||||
T_GREEN=
|
||||
T_YELLOW=
|
||||
T_DIM=
|
||||
T_BOLD=
|
||||
T_RESET=
|
||||
fi
|
||||
|
||||
function error()
|
||||
{
|
||||
echo "${T_BOLD}${T_RED}ERROR:${T_RESET} $*" 1>&2
|
||||
}
|
||||
|
||||
function die()
|
||||
{
|
||||
echo "${T_BOLD}${T_RED}FATAL ERROR:${T_RESET} $*" 1>&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
function skipping()
|
||||
{
|
||||
echo "${T_BOLD}${T_YELLOW}Skipping $*${T_RESET}"
|
||||
}
|
||||
|
||||
function hooray()
|
||||
{
|
||||
echo "${T_BOLD}${T_GREEN}$*${T_RESET}"
|
||||
}
|
||||
|
||||
if [[ "${DEBUG_CI}" == "yes" ]]; then
|
||||
function debug()
|
||||
{
|
||||
echo "${T_DIM}(debug): $*${T_RESET}"
|
||||
}
|
||||
else
|
||||
function debug()
|
||||
{
|
||||
:
|
||||
}
|
||||
fi
|
||||
|
||||
function yes_or_no()
|
||||
{
|
||||
local varname="$1"
|
||||
local value="${!varname}"
|
||||
debug "${varname} is ${value}"
|
||||
if [[ "${value}" != 'yes' && "${value}" != 'no' ]]; then
|
||||
die "${varname} must be 'yes' or 'no'. Got unexpected value ${value}".
|
||||
fi
|
||||
}
|
||||
|
||||
function incompatible()
|
||||
{
|
||||
local varname1="$1"
|
||||
local varname2="$2"
|
||||
local val1="${!varname1}"
|
||||
local val2="${!varname2}"
|
||||
if [[ "${val1}" = 'yes' && "${val2}" = 'yes' ]]; then
|
||||
die "Cannot set both ${varname1} and ${varname2}: they are incompatible."
|
||||
fi
|
||||
}
|
||||
|
||||
function runcmd()
|
||||
{
|
||||
echo "${T_BOLD}\$ $*${T_RESET}"
|
||||
if ! "$@" ; then
|
||||
error "command '$*' has failed."
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
function show_git_version()
|
||||
{
|
||||
local tool="$1"
|
||||
local dir="$2"
|
||||
local version="?????"
|
||||
if [[ -e "$dir/.git" ]] ; then
|
||||
version=$(cd "$dir"; git rev-parse HEAD)
|
||||
fi
|
||||
echo "${T_BOLD}$tool:${T_RESET} $version"
|
||||
}
|
||||
|
||||
if [[ "${ON_GITLAB}" == "yes" ]]; then
|
||||
function start_section()
|
||||
{
|
||||
local label="$1"
|
||||
local stamp
|
||||
stamp=$(date +%s)
|
||||
printf "section_start:%s:%s\r\e[0K" "$stamp" "$label"
|
||||
echo "${T_BOLD}${T_GREEN}========= $label${T_RESET}"
|
||||
}
|
||||
function end_section()
|
||||
{
|
||||
local label="$1"
|
||||
local stamp
|
||||
stamp=$(date +%s)
|
||||
printf "section_end:%s:%s\r\e[0K" "$stamp" "$label"
|
||||
}
|
||||
else
|
||||
function start_section()
|
||||
{
|
||||
true
|
||||
}
|
||||
function end_section()
|
||||
{
|
||||
true
|
||||
}
|
||||
fi
|
||||
|
||||
#############################################################################
|
||||
# Validate inputs.
|
||||
|
||||
debug Validating inputs
|
||||
yes_or_no DEBUG_CI
|
||||
yes_or_no COLOR_CI
|
||||
yes_or_no ON_GITLAB
|
||||
yes_or_no FATAL_WARNINGS
|
||||
yes_or_no HARDENING
|
||||
yes_or_no COVERAGE
|
||||
yes_or_no DOXYGEN
|
||||
yes_or_no ASCIIDOC
|
||||
yes_or_no TRACING
|
||||
yes_or_no ALL_BUGS_ARE_FATAL
|
||||
yes_or_no DISABLE_DIRAUTH
|
||||
yes_or_no DISABLE_RELAY
|
||||
yes_or_no NSS
|
||||
yes_or_no GPL
|
||||
|
||||
yes_or_no RUN_STAGE_CONFIGURE
|
||||
yes_or_no RUN_STAGE_BUILD
|
||||
yes_or_no RUN_STAGE_TEST
|
||||
|
||||
yes_or_no CHECK
|
||||
yes_or_no STEM
|
||||
yes_or_no DISTCHECK
|
||||
|
||||
incompatible DISTCHECK CHECK
|
||||
incompatible DISTCHECK CHUTNEY
|
||||
incompatible DISTCHECK STEM
|
||||
incompatible DISTCHECK COVERAGE
|
||||
incompatible DISTCHECK DOXYGEN
|
||||
|
||||
if [[ "${CHUTNEY}" = yes && "${CHUTNEY_PATH}" = '' ]] ; then
|
||||
die "CHUTNEY is set to 'yes', but CHUTNEY_PATH was not specified."
|
||||
fi
|
||||
|
||||
if [[ "${STEM}" = yes && "${STEM_PATH}" = '' ]] ; then
|
||||
die "STEM is set to 'yes', but STEM_PATH was not specified."
|
||||
fi
|
||||
|
||||
#############################################################################
|
||||
# Set up options for make and configure.
|
||||
|
||||
make_options=()
|
||||
if [[ "$MAKE_J_OPT" != "" ]]; then
|
||||
make_options+=("$MAKE_J_OPT")
|
||||
fi
|
||||
if [[ "$MAKE_K_OPT" != "" ]]; then
|
||||
make_options+=("$MAKE_K_OPT")
|
||||
fi
|
||||
|
||||
configure_options=()
|
||||
if [[ "$FATAL_WARNINGS" == "yes" ]]; then
|
||||
configure_options+=("--enable-fatal-warnings")
|
||||
fi
|
||||
if [[ "$HARDENING" == "yes" ]]; then
|
||||
configure_options+=("--enable-fragile-hardening")
|
||||
fi
|
||||
if [[ "$COVERAGE" == "yes" ]]; then
|
||||
configure_options+=("--enable-coverage")
|
||||
fi
|
||||
if [[ "$ASCIIDOC" != "yes" ]]; then
|
||||
configure_options+=("--disable-asciidoc")
|
||||
fi
|
||||
if [[ "$TRACING" == "yes" ]]; then
|
||||
configure_options+=("--enable-tracing-instrumentation-lttng")
|
||||
fi
|
||||
if [[ "$ALL_BUGS_ARE_FATAL" == "yes" ]]; then
|
||||
configure_options+=("--enable-all-bugs-are-fatal")
|
||||
fi
|
||||
if [[ "$DISABLE_DIRAUTH" == "yes" ]]; then
|
||||
configure_options+=("--disable-module-dirauth")
|
||||
fi
|
||||
if [[ "$DISABLE_RELAY" == "yes" ]]; then
|
||||
configure_options+=("--disable-module-relay")
|
||||
fi
|
||||
if [[ "$NSS" == "yes" ]]; then
|
||||
configure_options+=("--enable-nss")
|
||||
fi
|
||||
if [[ "$GPL" == "yes" ]]; then
|
||||
configure_options+=("--enable-gpl")
|
||||
fi
|
||||
|
||||
#############################################################################
|
||||
# Tell the user about our versions of different tools and packages.
|
||||
|
||||
uname -a
|
||||
printf "python: "
|
||||
python -V || echo "no 'python' binary."
|
||||
printf "python3: "
|
||||
python3 -V || echo "no 'python3' binary."
|
||||
|
||||
show_git_version Tor "${CI_SRCDIR}"
|
||||
if [[ "${STEM}" = "yes" ]]; then
|
||||
show_git_version Stem "${STEM_PATH}"
|
||||
fi
|
||||
if [[ "${CHUTNEY}" = "yes" ]]; then
|
||||
show_git_version Chutney "${CHUTNEY_PATH}"
|
||||
fi
|
||||
|
||||
#############################################################################
|
||||
# Determine the version of Tor.
|
||||
|
||||
TOR_VERSION=$(grep -m 1 AC_INIT "${CI_SRCDIR}"/configure.ac | sed -e 's/.*\[//; s/\].*//;')
|
||||
|
||||
# Use variables like these when we need to behave differently depending on
|
||||
# Tor version. Only create the variables we need.
|
||||
TOR_VER_AT_LEAST_043=no
|
||||
TOR_VER_AT_LEAST_044=no
|
||||
|
||||
# These are the currently supported Tor versions; no need to work with anything
|
||||
# ancient in this script.
|
||||
case "$TOR_VERSION" in
|
||||
0.4.7.*)
|
||||
TOR_VER_AT_LEAST_043=yes
|
||||
TOR_VER_AT_LEAST_044=yes
|
||||
;;
|
||||
esac
|
||||
|
||||
#############################################################################
# Make sure the directories are all there.

# Make sure CI_SRCDIR exists and has a file we expect.
if [[ ! -d "$CI_SRCDIR" ]] ; then
    die "CI_SRCDIR=${CI_SRCDIR} is not a directory"
fi

if [[ ! -f "$CI_SRCDIR/src/core/or/or.h" ]] ; then
    die "CI_SRCDIR=${CI_SRCDIR} does not look like a Tor directory."
fi

# Make CI_SRCDIR absolute.
CI_SRCDIR=$(cd "$CI_SRCDIR" && pwd)

# Create an "artifacts" directory to copy artifacts into.
mkdir -p ./artifacts

if [[ "$RUN_STAGE_CONFIGURE" = "yes" ]]; then

    start_section "Autogen"
    runcmd cd "${CI_SRCDIR}"
    runcmd ./autogen.sh
    runcmd mkdir -p "${CI_BUILDDIR}"
    runcmd cd "${CI_BUILDDIR}"
    end_section "Autogen"

    # make the builddir absolute too.
    CI_BUILDDIR=$(pwd)

    start_section "Configure"
    if ! runcmd "${CI_SRCDIR}"/configure "${configure_options[@]}" ; then
        error "Here is the end of config.log:"
        runcmd tail config.log
        die "Unable to continue"
    fi
    end_section "Configure"
else
    debug "Skipping configure stage. Making sure that ${CI_BUILDDIR}/config.log exists."
    if [[ ! -d "${CI_BUILDDIR}" ]]; then
        die "Build directory ${CI_BUILDDIR} did not exist!"
    fi
    if [[ ! -f "${CI_BUILDDIR}/config.log" ]]; then
        die "Tor was not configured in ${CI_BUILDDIR}!"
    fi

    cp config.log "${CI_SRCDIR}"/artifacts

    runcmd cd "${CI_BUILDDIR}"
    CI_BUILDDIR=$(pwd)
fi

###############################
# Build Tor.

if [[ "$RUN_STAGE_BUILD" = "yes" ]] ; then
    if [[ "$DISTCHECK" = "no" ]]; then
        start_section "Build"
        runcmd make "${make_options[@]}" all
        cp src/app/tor "${CI_SRCDIR}"/artifacts
        end_section "Build"
    else
        export DISTCHECK_CONFIGURE_FLAGS="${configure_options[*]}"
        # XXXX Set make options?
        start_section Distcheck
        if runcmd make "${make_options[@]}" distcheck ; then
            hooray "Distcheck was successful. Nothing further will be done."
            # We have to exit early here, since we can't do any other tests.
            cp tor-*.tar.gz "${CI_SRCDIR}"/artifacts
        else
            error "Diagnostics:"
            runcmd make show-distdir-testlog || true
            runcmd make show-distdir-core || true
            die "Unable to continue."
        fi
        end_section Distcheck
        exit 0
    fi
fi

##############################
# Run tests.

if [[ "$RUN_STAGE_TEST" == "no" ]]; then
    echo "Skipping tests. Exiting now."
    exit 0
fi

FAILED_TESTS=""

if [[ "${DOXYGEN}" = 'yes' ]]; then
    start_section Doxygen
    if [[ "${TOR_VER_AT_LEAST_043}" = 'yes' ]]; then
        if runcmd make doxygen; then
            hooray "make doxygen has succeeded."
        else
            FAILED_TESTS="${FAILED_TESTS} doxygen"
        fi
    else
        skipping "make doxygen: doxygen is broken for Tor < 0.4.3"
    fi
    end_section Doxygen
fi

if [[ "${ASCIIDOC}" = 'yes' ]]; then
    start_section Asciidoc
    if runcmd make manpages; then
        hooray "make manpages has succeeded."
    else
        FAILED_TESTS="${FAILED_TESTS} asciidoc"
    fi
    end_section Asciidoc
fi

if [[ "${CHECK}" = "yes" ]]; then
    start_section "Check"
    if runcmd make "${make_options[@]}" check; then
        hooray "make check has succeeded."
    else
        error "Here are the contents of the test suite output:"
        runcmd cat test-suite.log || true
        FAILED_TESTS="${FAILED_TESTS} check"
    fi
    end_section "Check"
fi

if [[ "${CHUTNEY}" = "yes" ]]; then
    start_section "Chutney"
    export CHUTNEY_TOR_SANDBOX=0
    export CHUTNEY_ALLOW_FAILURES=2
    # Send 5MB for every verify check.
    export CHUTNEY_DATA_BYTES=5000000
    if runcmd make "${CHUTNEY_MAKE_TARGET}"; then
        hooray "Chutney tests have succeeded"
    else
        error "Chutney says:"
        export CHUTNEY_DATA_DIR="${CHUTNEY_PATH}/net"
        runcmd "${CHUTNEY_PATH}"/tools/diagnostics.sh || true
        # XXXX These next two should be part of a make target.
        runcmd ls test_network_log || true
        runcmd head -n -0 test_network_log/* || true
        FAILED_TESTS="${FAILED_TESTS} chutney"
    fi
    end_section "Chutney"
fi

if [[ "${STEM}" = "yes" ]]; then
    start_section "Stem"
    # 0.3.5 and onward have now disabled onion service v2 so we need to exclude
    # these Stem tests from now on.
    EXCLUDE_TESTS="--exclude-test control.controller.test_ephemeral_hidden_services_v2 --exclude-test control.controller.test_hidden_services_conf --exclude-test control.controller.test_with_ephemeral_hidden_services_basic_auth --exclude-test control.controller.test_without_ephemeral_hidden_services --exclude-test control.controller.test_with_ephemeral_hidden_services_basic_auth_no_credentials"
    if [[ "${TOR_VER_AT_LEAST_044}" = 'yes' ]]; then
        # XXXX This should probably be part of some test-stem make target.

        # Disable the check around EXCLUDE_TESTS that requires double quote. We
        # need it to be expanded.
        # shellcheck disable=SC2086
        if runcmd timelimit -p -t 520 -s USR1 -T 30 -S ABRT \
                python3 "${STEM_PATH}/run_tests.py" \
                --tor src/app/tor \
                --integ --test control.controller \
                $EXCLUDE_TESTS \
                --test control.base_controller \
                --test process \
                --log TRACE \
                --log-file stem.log ; then
            hooray "Stem tests have succeeded"
        else
            error "Stem output:"
            runcmd tail -1000 "${STEM_PATH}"/test/data/tor_log
            runcmd grep -v "SocketClosed" stem.log | tail -1000
            FAILED_TESTS="${FAILED_TESTS} stem"
        fi
    else
        skipping "Stem: broken with <= 0.4.3. See bug tor#40077"
    fi
    end_section "Stem"
fi

# TODO: Coverage

if [[ "${FAILED_TESTS}" != "" ]]; then
    die "Failed tests: ${FAILED_TESTS}"
fi

hooray "Everything seems fine."
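Note: the CI fragment above relies on helpers (runcmd, start_section, end_section, hooray, error, skipping, debug, die) that are defined earlier in ci-driver.sh and do not appear in this diff. A minimal illustrative sketch of what such helpers might look like, purely as an assumption and not the real definitions, is:

    # hypothetical stand-ins for the ci-driver.sh helper functions
    runcmd() { echo "\$ $*"; "$@"; }
    start_section() { echo "===== BEGIN $1 ====="; }
    end_section() { echo "===== END $1 ====="; }
    hooray() { echo "SUCCESS: $*"; }
    error() { echo "ERROR: $*" >&2; }
    skipping() { echo "SKIPPING: $*"; }
    debug() { echo "DEBUG: $*"; }
    die() { echo "FATAL: $*" >&2; exit 1; }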
@ -1,9 +0,0 @@
#!/bin/sh

# apply.sh:
# run spatch with appropriate includes and builtins for the Tor source code

top="$(dirname "$0")/../.."

spatch -macro_file_builtins "$top"/scripts/coccinelle/tor-coccinelle.h \
       -I "$top" -I "$top"/src -I "$top"/ext --defined COCCI "$@"
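Since apply.sh forwards its remaining arguments straight to spatch, a typical use is to name a semantic patch and the files to rewrite. The invocation below is only a sketch: the rule filename is a placeholder, and the exact spelling of spatch's long options (dashes vs. underscores) varies between coccinelle releases.

    # apply one semantic patch in place to a set of sources (illustrative)
    scripts/coccinelle/apply.sh --sp-file scripts/coccinelle/some-rule.cocci --in-place src/core/or/*.c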
@ -1,23 +0,0 @@
// Use calloc or realloc as appropriate instead of multiply-and-alloc

@malloc_to_calloc@
identifier f =~ "(tor_malloc|tor_malloc_zero)";
expression a;
constant b;
@@
- f(a * b)
+ tor_calloc(a, b)

@calloc_arg_order@
expression a;
type t;
@@
- tor_calloc(sizeof(t), a)
+ tor_calloc(a, sizeof(t))

@realloc_to_reallocarray@
expression a, b;
expression p;
@@
- tor_realloc(p, a * b)
+ tor_reallocarray(p, a, b)
@ -1,6 +0,0 @@
@@
expression n, d;
@@

- (((n) + (d) - 1) / (d))
+ CEIL_DIV(n, d)
@ -1,98 +0,0 @@
#!/bin/sh

# If we have coccinelle installed, run try_parse.sh on every filename passed
# as an argument. If no filenames are supplied, scan a standard Tor 0.3.5 or
# later directory layout.
#
# Uses the default coccinelle exceptions file, or $TOR_COCCI_EXCEPTIONS_FILE,
# if it is set.
#
# Use TOR_COCCI_EXCEPTIONS_FILE=/dev/null check_cocci_parse.sh to disable
# the default exception file.
#
# If spatch is not installed, remind the user to install it, but exit with
# a success exit status.

scripts_cocci="$(dirname "$0")"
top="$scripts_cocci/../.."
try_parse="$scripts_cocci/try_parse.sh"

exitcode=0

export TOR_COCCI_EXCEPTIONS_FILE="${TOR_COCCI_EXCEPTIONS_FILE:-$scripts_cocci/exceptions.txt}"

PURPOSE="cocci C parsing"

echo "Checking spatch:"

if ! command -v spatch ; then
    echo "Install coccinelle's spatch to check $PURPOSE."
    exit "$exitcode"
fi

# Returns true if $1 is greater than or equal to $2
version_ge()
{
    if test "$1" = "$2" ; then
        # return true
        return 0
    fi
    LOWER_VERSION="$(printf '%s\n' "$1" "$2" | $SORT_V | head -n 1)"
    # implicit return
    test "$LOWER_VERSION" != "$1"
}

# 'sort -V' is a gnu extension
SORT_V="sort -V"
# Use 'sort -n' if 'sort -V' doesn't work
if ! version_ge "1" "0" ; then
    echo "Your 'sort -V' command appears broken. Falling back to 'sort -n'."
    echo "Some spatch version checks may give the wrong result."
    SORT_V="sort -n"
fi

# Print the full spatch version, for diagnostics
spatch --version

MIN_SPATCH_V="1.0.4"
# This pattern needs to handle version strings like:
# spatch version 1.0.0-rc19
# spatch version 1.0.6 compiled with OCaml version 4.05.0
SPATCH_V=$(spatch --version | head -1 | \
           sed 's/spatch version \([0-9][^ ]*\).*/\1/')

if ! version_ge "$SPATCH_V" "$MIN_SPATCH_V" ; then
    echo "Tor requires coccinelle spatch >= $MIN_SPATCH_V to check $PURPOSE."
    echo "But you have $SPATCH_V. Please install a newer version."
    exit "$exitcode"
fi

if test $# -ge 1 ; then
    "$try_parse" "$@"
    exitcode=$?
else
    cd "$top" || exit 1
    # This is the layout in 0.3.5
    # Keep these lists consistent:
    #   - OWNED_TOR_C_FILES in Makefile.am
    #   - CHECK_FILES in pre-commit.git-hook and pre-push.git-hook
    #   - try_parse in check_cocci_parse.sh
    "$try_parse" \
        src/lib/*/*.[ch] \
        src/core/*/*.[ch] \
        src/feature/*/*.[ch] \
        src/app/*/*.[ch] \
        src/test/*.[ch] \
        src/test/*/*.[ch] \
        src/tools/*.[ch]
    exitcode=$?
fi

if test "$exitcode" != 0 ; then
    echo "Please fix these $PURPOSE errors in the above files"
    echo "Set VERBOSE=1 for more details"
    echo "Try running test-operator-cleanup or 'make autostyle-operators'"
    echo "As a last resort, you can modify scripts/coccinelle/exceptions.txt"
fi

exit "$exitcode"
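Usage follows directly from the header comments above; for example (the file argument is illustrative):

    # check the default 0.3.5+ layout with the default exception list
    scripts/coccinelle/check_cocci_parse.sh
    # check one file, ignoring the exception patterns
    TOR_COCCI_EXCEPTIONS_FILE=/dev/null scripts/coccinelle/check_cocci_parse.sh src/core/or/channel.c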
@ -1,43 +0,0 @@
// Script to clean up after ctrl-reply.cocci -- run as a separate step
// because cleanup_write2 (even when disabled) somehow prevents the
// match rule in ctrl-reply.cocci from matching.

// If it doesn't have to be a printf, turn it into a write

@ cleanup_write @
expression E;
constant code, s;
@@
-control_printf_endreply(E, code, s)
+control_write_endreply(E, code, s)

// Use send_control_done() instead of explicitly writing it out
@ cleanup_send_done @
type T;
identifier f != send_control_done;
expression E;
@@
T f(...) {
<...
-control_write_endreply(E, 250, "OK")
+send_control_done(E)
...>
}

// Clean up more printfs that could be writes
//
// For some reason, including this rule, even disabled, causes the
// match rule in ctrl-reply.cocci to fail to match some code that has
// %s in its format strings

@ cleanup_write2 @
expression E1, E2;
constant code;
@@
(
-control_printf_endreply(E1, code, "%s", E2)
+control_write_endreply(E1, code, E2)
|
-control_printf_midreply(E1, code, "%s", E2)
+control_write_midreply(E1, code, E2)
)
@ -1,87 +0,0 @@
// Script to edit control_*.c for refactored control reply output functions

@ initialize:python @
@@
import re
from coccilib.report import *

# reply strings "NNN-foo", "NNN+foo", "NNN foo", etc.
r = re.compile(r'^"(\d+)([ +-])(.*)\\r\\n"$')

# Generate name of function to call based on which separator character
# comes between the numeric code and the text
def idname(sep, base):
    if sep == '+':
        return base + "datareply"
    elif sep == '-':
        return base + "midreply"
    else:
        return base + "endreply"

# Generate the actual replacements used by the rules
def gen(s, base, p):
    pos = p[0]
    print_report(pos, "%s %s" % (base, s))
    m = r.match(s)
    if m is None:
        # String not correct format, so fail match
        cocci.include_match(False)
        print_report(pos, "BAD STRING %s" % s)
        return

    code, sep, s1 = m.groups()

    if r'\r\n' in s1:
        # Extra CRLF in string, so fail match
        cocci.include_match(False)
        print_report(pos, "extra CRLF in string %s" % s)
        return

    coccinelle.code = code
    # Need a string that is a single C token, because Coccinelle only allows
    # "identifiers" to be output from Python scripts?
    coccinelle.body = '"%s"' % s1
    coccinelle.id = idname(sep, base)
    return

@ match @
identifier f;
position p;
expression E;
constant s;
@@
(
connection_printf_to_buf@f@p(E, s, ...)
|
connection_write_str_to_buf@f@p(s, E)
)

@ script:python sc1 @
s << match.s;
p << match.p;
f << match.f;
id;
body;
code;
@@
if f == 'connection_printf_to_buf':
    gen(s, 'control_printf_', p)
elif f == 'connection_write_str_to_buf':
    gen(s, 'control_write_', p)
else:
    raise(ValueError("%s: %s" % (f, s)))

@ replace @
constant match.s;
expression match.E;
identifier match.f;
identifier sc1.body, sc1.id, sc1.code;
@@
(
-connection_write_str_to_buf@f(s, E)
+id(E, code, body)
|
-connection_printf_to_buf@f(E, s
+id(E, code, body
, ...)
)
@ -1,29 +0,0 @@
// Look for use of expressions with side-effects inside of debug logs.
//
// This script detects expressions like ++E, --E, E++, and E-- inside of
// calls to log_debug().
//
// The log_debug() macro exits early if debug logging is not enabled,
// potentially causing problems if its arguments have side-effects.

@@
expression E;
@@
*log_debug(... , <+... --E ...+>, ... );


@@
expression E;
@@
*log_debug(... , <+... ++E ...+>, ... );

@@
expression E;
@@
*log_debug(... , <+... E-- ...+>, ... );


@@
expression E;
@@
*log_debug(... , <+... E++ ...+>, ... );
@ -1,24 +0,0 @@
# A list of exception patterns for check_cocci_parse.sh
# Passed to 'grep -f'
src/lib/cc/compat_compiler.h
src/lib/container/handles.h
src/lib/container/map.c
src/lib/container/map.h
src/lib/container/order.c
src/lib/crypt_ops/crypto_rand.c
src/lib/fs/files.h
src/lib/log/util_bug.c
src/lib/pubsub/pubsub_macros.h
src/lib/smartlist_core/smartlist_foreach.h
src/lib/testsupport/testsupport.h
src/lib/tls/tortls.h
src/lib/tls/tortls_openssl.c
src/lib/tls/x509.h
src/lib/version/version.c
src/core/mainloop/connection.c
src/core/or/reasons.c
src/feature/dirclient/dirclient.c
src/feature/nodelist/networkstatus.c
src/test/test_address.c
src/test/test_hs_cache.c
src/test/test_hs_descriptor.c
@ -1,38 +0,0 @@
@cast_malloc@
expression e;
type T;
@@
- (T *)tor_malloc(e)
+ tor_malloc(e)

@cast_malloc_zero@
expression e;
type T;
identifier func;
@@
- (T *)tor_malloc_zero(e)
+ tor_malloc_zero(e)

@cast_calloc@
expression a, b;
type T;
identifier func;
@@
- (T *)tor_calloc(a, b)
+ tor_calloc(a, b)

@cast_realloc@
expression e;
expression p;
type T;
@@
- (T *)tor_realloc(p, e)
+ tor_realloc(p, e)

@cast_reallocarray@
expression a,b;
expression p;
type T;
@@
- (T *)tor_reallocarray(p, a, b)
+ tor_reallocarray(p, a, b)
@ -1,24 +0,0 @@
#!/usr/bin/perl -w -p -i
#
# Copyright (c) 2001 Matej Pfajfar.
# Copyright (c) 2001-2004, Roger Dingledine.
# Copyright (c) 2004-2006, Roger Dingledine, Nick Mathewson.
# Copyright (c) 2007-2019, The Tor Project, Inc.
# See LICENSE for licensing information

# This script looks for instances of C comparison operators as macro arguments,
# and replaces them with our OP_* equivalents.
#
# Some macros that take operators are our tt_int_op() testing macro, and the
# standard timercmp() macro. Coccinelle can't handle their syntax, however,
# unless we give them their operators as a macro too.

next if m#^ */\*# or m#^ *\* #;

s/<([,)])/OP_LT$1/;
s/(?<=[\s,])>([,)])/OP_GT$1/;
#s/>([,)])/OP_GT$1/;
s/==([,)])/OP_EQ$1/;
s/>=([,)])/OP_GE$1/;
s/<=([,)])/OP_LE$1/;
s/!=([,)])/OP_NE$1/;
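Because the script runs under perl -p -i, it rewrites its arguments in place. A hypothetical invocation over the test sources might look like the following; the script path is an assumption, since the file's location is not shown in this diff:

    # rewrite comparison operators in test macros to OP_* names (illustrative)
    perl scripts/coccinelle/test-operator-cleanup src/test/*.[ch]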
@ -1,49 +0,0 @@
@@
int e;
constant c;
@@

(
- tt_assert(e == c)
+ tt_int_op(e, OP_EQ, c)
|
- tt_assert(e != c)
+ tt_int_op(e, OP_NE, c)
|
- tt_assert(e < c)
+ tt_int_op(e, OP_LT, c)
|
- tt_assert(e <= c)
+ tt_int_op(e, OP_LE, c)
|
- tt_assert(e > c)
+ tt_int_op(e, OP_GT, c)
|
- tt_assert(e >= c)
+ tt_int_op(e, OP_GE, c)
)

@@
unsigned int e;
constant c;
@@

(
- tt_assert(e == c)
+ tt_uint_op(e, OP_EQ, c)
|
- tt_assert(e != c)
+ tt_uint_op(e, OP_NE, c)
|
- tt_assert(e < c)
+ tt_uint_op(e, OP_LT, c)
|
- tt_assert(e <= c)
+ tt_uint_op(e, OP_LE, c)
|
- tt_assert(e > c)
+ tt_uint_op(e, OP_GT, c)
|
- tt_assert(e >= c)
+ tt_uint_op(e, OP_GE, c)
)
@ -1,11 +0,0 @@
@@
expression * e;
@@

(
- tt_assert(e != NULL)
+ tt_ptr_op(e, OP_NE, NULL)
|
- tt_assert(e == NULL)
+ tt_ptr_op(e, OP_EQ, NULL)
)
@ -1,5 +0,0 @@
@@
@@

- tt_assert(0)
+ tt_abort()
@ -1,63 +0,0 @@
/* Copyright (c) 2001 Matej Pfajfar.
 * Copyright (c) 2001-2004, Roger Dingledine.
 * Copyright (c) 2004-2006, Roger Dingledine, Nick Mathewson.
 * Copyright (c) 2007-2019, The Tor Project, Inc. */
/* See LICENSE for licensing information */

/*
 * This file looks like a C header, but its purpose is a bit different.
 *
 * We never include it from our real C files; we only tell Coccinelle
 * about it in apply.sh.
 *
 * It tells the Coccinelle semantic patching tool how to understand
 * things that would otherwise not be good C syntax, or which would
 * otherwise not make sense to it as C. It doesn't need to produce
 * semantically equivalent C, or even correct C: it only has to produce
 * syntactically valid C.
 */

#define MOCK_DECL(a, b, c) a b c
#define MOCK_IMPL(a, b, c) a b c
#define CHECK_PRINTF(a, b)
#define CHECK_SCANF(a, b)
#define STATIC static
#define EXTERN(a,b) extern a b;

#define STMT_BEGIN do {
#define STMT_END } while (0)

#define BUG(x) (x)
#define IF_BUG_ONCE(x) if (x)

#define ATTR_NORETURN
#define ATTR_UNUSED
#define ATTR_CONST
#define ATTR_MALLOC
#define ATTR_WUR
#define DISABLE_GCC_WARNING(x)
#define ENABLE_GCC_WARNING(x)

#define HANDLE_DECL(a,b,c)
#define HANDLE_IMPL(a,b,c)
#define HT_ENTRY(x) void *
#define HT_HEAD(a,b) struct ht_head
#define HT_INITIALIZER() { }
#define X509 struct x509_st
#define STACK_OF(x) struct foo_stack_t
#define TOR_TAILQ_HEAD(a,b) struct tailq_head
#define TOR_TAILQ_ENTRY(a) struct tailq_entry
#define TOR_SIMPLEQ_HEAD(a,b) struct simpleq_entry
#define TOR_SIMPLEQ_ENTRY(a) struct simpleq_entry
#define TOR_LIST_HEAD(a,b) struct list_head
#define TOR_LIST_ENTRY(a) struct list_entry
#define TOR_SLIST_HEAD(a,b) struct slist_head
#define TOR_SLIST_ENTRY(a) struct slist_entry

#define NS_DECL(a, b, c) a b c
#define NS(a) a

#define CONF_TEST_MEMBERS(a,b,c)
#define DUMMY_CONF_TEST_MEMBERS

#define EAT_SEMICOLON extern int dummy__;
@ -1,46 +0,0 @@
#!/bin/sh

# Echo the name of every argument of this script that is not "perfect"
# according to coccinelle's --parse-c.
#
# If $TOR_COCCI_EXCEPTIONS_FILE is non-empty, skip any files that match the
# patterns in the exception file, according to "grep -f"
#
# If VERBOSE is non-empty, log spatch errors and skipped files.

top="$(dirname "$0")/../.."

exitcode=0

for fn in "$@"; do

    if test "${TOR_COCCI_EXCEPTIONS_FILE}" ; then
        skip_fn=$(echo "$fn" | grep -f "${TOR_COCCI_EXCEPTIONS_FILE}")
        if test "${skip_fn}" ; then
            if test "${VERBOSE}" != ""; then
                echo "Skipping '${skip_fn}'"
            fi
            continue
        fi
    fi

    if spatch --macro-file-builtins \
              "$top"/scripts/coccinelle/tor-coccinelle.h \
              --defined COCCI \
              --parse-c "$fn" \
              2>/dev/null | grep "perfect = 1" > /dev/null; then
        : # it's perfect
    else
        echo "$fn"
        if test "${VERBOSE}" != ""; then
            spatch --macro-file-builtins \
                   "$top"/scripts/coccinelle/tor-coccinelle.h \
                   --defined COCCI \
                   --parse-c "$fn"
        fi
        exitcode=1
    fi

done

exit "$exitcode"
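try_parse.sh is normally driven by check_cocci_parse.sh, but it can also be run by hand; for example (the file argument is illustrative):

    # print the names of files spatch cannot parse, with diagnostics
    VERBOSE=1 scripts/coccinelle/try_parse.sh src/core/or/circuitlist.c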
@ -1,13 +0,0 @@

@@
expression a;
@@
- tor_calloc(1, a)
+ tor_malloc_zero(a)

@@
expression a;
@@
- tor_calloc(a, 1)
+ tor_malloc_zero(a)

@ -1,161 +0,0 @@
#!/usr/bin/env python

# Future imports for Python 2.7, mandatory in 3.0
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

FUZZERS = """
address
addressPTR
consensus
descriptor
diff
diff-apply
extrainfo
hsdescv3
hsdescv3-inner
hsdescv3-middle
http
http-connect
microdesc
socks
strops
vrs
"""


PREAMBLE = r"""
FUZZING_CPPFLAGS = \
  $(src_test_AM_CPPFLAGS) $(TEST_CPPFLAGS)
FUZZING_CFLAGS = \
  $(AM_CFLAGS) $(TEST_CFLAGS)
FUZZING_LDFLAG = \
  @TOR_LDFLAGS_zlib@ $(TOR_LDFLAGS_CRYPTLIB) @TOR_LDFLAGS_libevent@
FUZZING_LIBS = \
  src/test/libtor-testing.a \
  @TOR_ZLIB_LIBS@ @TOR_LIB_MATH@ \
  @TOR_LIBEVENT_LIBS@ $(TOR_LIBS_CRYPTLIB) \
  @TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_GDI@ @TOR_LIB_USERENV@ @CURVE25519_LIBS@ \
  @TOR_SYSTEMD_LIBS@ \
  @TOR_LZMA_LIBS@ \
  @TOR_ZSTD_LIBS@ \
  @TOR_TRACE_LIBS@

oss-fuzz-prereqs: \
  src/test/libtor-testing.a

noinst_HEADERS += \
  src/test/fuzz/fuzzing.h

LIBFUZZER_CPPFLAGS = $(FUZZING_CPPFLAGS) -DLLVM_FUZZ
LIBFUZZER_CFLAGS = $(FUZZING_CFLAGS)
LIBFUZZER_LDFLAG = $(FUZZING_LDFLAG) -fsanitize=fuzzer
LIBFUZZER_LIBS = $(FUZZING_LIBS) -lstdc++

LIBOSS_FUZZ_CPPFLAGS = $(FUZZING_CPPFLAGS) -DLLVM_FUZZ
LIBOSS_FUZZ_CFLAGS = $(FUZZING_CFLAGS)
"""

POSTAMBLE = r"""
noinst_PROGRAMS += $(FUZZERS) $(LIBFUZZER_FUZZERS)
noinst_LIBRARIES += $(OSS_FUZZ_FUZZERS)
oss-fuzz-fuzzers: oss-fuzz-prereqs $(OSS_FUZZ_FUZZERS)
fuzzers: $(FUZZERS) $(LIBFUZZER_FUZZERS)

test-fuzz-corpora: $(FUZZERS)
	$(top_srcdir)/src/test/fuzz_static_testcases.sh
"""

########### No user serviceable parts will follow.

PREAMBLE = PREAMBLE.strip()
POSTAMBLE = POSTAMBLE.strip() # If I use it, it's a word!
FUZZERS = FUZZERS.split()
FUZZERS.sort()

WARNING = """
# This file was generated by fuzzing_include_am.py; do not hand-edit unless
# you enjoy having your changes erased.
""".strip()

print(WARNING)

print(PREAMBLE)

print("\n# ===== AFL fuzzers")

def get_id_name(s):
    return s.replace("-", "_")

for fuzzer in FUZZERS:
    idname = get_id_name(fuzzer)
    print("""\
if UNITTESTS_ENABLED
src_test_fuzz_fuzz_{name}_SOURCES = \\
	src/test/fuzz/fuzzing_common.c \\
	src/test/fuzz/fuzz_{name}.c
src_test_fuzz_fuzz_{name}_CPPFLAGS = $(FUZZING_CPPFLAGS)
src_test_fuzz_fuzz_{name}_CFLAGS = $(FUZZING_CFLAGS)
src_test_fuzz_fuzz_{name}_LDFLAGS = $(FUZZING_LDFLAG)
src_test_fuzz_fuzz_{name}_LDADD = $(FUZZING_LIBS)
endif
""".format(name=idname))

print("if UNITTESTS_ENABLED")
print("FUZZERS = \\")
print(" \\\n".join("\tsrc/test/fuzz/fuzz-{name}".format(name=fuzzer)
                   for fuzzer in FUZZERS))
print("endif")

print("\n# ===== libfuzzer")
print("\nif LIBFUZZER_ENABLED")

for fuzzer in FUZZERS:
    idname = get_id_name(fuzzer)
    print("""\
if UNITTESTS_ENABLED
src_test_fuzz_lf_fuzz_{name}_SOURCES = \\
	$(src_test_fuzz_fuzz_{name}_SOURCES)
src_test_fuzz_lf_fuzz_{name}_CPPFLAGS = $(LIBFUZZER_CPPFLAGS)
src_test_fuzz_lf_fuzz_{name}_CFLAGS = $(LIBFUZZER_CFLAGS)
src_test_fuzz_lf_fuzz_{name}_LDFLAGS = $(LIBFUZZER_LDFLAG)
src_test_fuzz_lf_fuzz_{name}_LDADD = $(LIBFUZZER_LIBS)
endif
""".format(name=idname))

print("LIBFUZZER_FUZZERS = \\")
print(" \\\n".join("\tsrc/test/fuzz/lf-fuzz-{name}".format(name=fuzzer)
                   for fuzzer in FUZZERS))

print("""
else
LIBFUZZER_FUZZERS =
endif""")

print("\n# ===== oss-fuzz\n")
print("if OSS_FUZZ_ENABLED")

for fuzzer in FUZZERS:
    idname = get_id_name(fuzzer)
    print("""\
if UNITTESTS_ENABLED
src_test_fuzz_liboss_fuzz_{name}_a_SOURCES = \\
	$(src_test_fuzz_fuzz_{name}_SOURCES)
src_test_fuzz_liboss_fuzz_{name}_a_CPPFLAGS = $(LIBOSS_FUZZ_CPPFLAGS)
src_test_fuzz_liboss_fuzz_{name}_a_CFLAGS = $(LIBOSS_FUZZ_CFLAGS)
endif
""".format(name=idname))

print("OSS_FUZZ_FUZZERS = \\")
print(" \\\n".join("\tsrc/test/fuzz/liboss-fuzz-{name}.a".format(name=fuzzer)
                   for fuzzer in FUZZERS))

print("""
else
OSS_FUZZ_FUZZERS =
endif""")

print("")

print(POSTAMBLE)
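The generated Automake fragment is meant to be captured into the build tree. The output path below is an assumption inferred from the WARNING text, not something stated in this diff:

    # regenerate the fuzzer build rules (output path illustrative)
    python scripts/codegen/fuzzing_include_am.py > src/test/fuzz/include.am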
@ -1,37 +0,0 @@
#!/usr/bin/perl -w

use strict;
my %syscalls = ();

while (<>) {
    if (/^#define (__NR_\w+) /) {
        $syscalls{$1} = 1;
    }
}

print <<EOL;
/* Automatically generated with
     gen_linux_syscalls.pl /usr/include/asm/unistd*.h
   Do not edit.
 */
static const struct {
  int syscall_num; const char *syscall_name;
} SYSCALLS_BY_NUMBER[] = {
EOL

for my $k (sort keys %syscalls) {
    my $name = $k;
    $name =~ s/^__NR_//;
    print <<EOL;
#ifdef $k
  { $k, "$name" },
#endif
EOL

}

print <<EOL
  {0, NULL}
};

EOL
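The generated header comment in the script documents the intended invocation; the redirect target below is a placeholder, since the real output filename is not shown in this diff:

    # regenerate the syscall-name table from the kernel headers (illustrative)
    perl scripts/codegen/gen_linux_syscalls.pl /usr/include/asm/unistd*.h > linux_syscalls.inc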
@ -1,136 +0,0 @@
#!/usr/bin/env python
# Copyright 2014-2019, The Tor Project, Inc
# See LICENSE for licensing information

# This script parses openssl headers to find ciphersuite names, determines
# which ones we should be willing to use as a server, and sorts them according
# to preference rules.
#
# Run it on all the files in your openssl include directory.

# Future imports for Python 2.7, mandatory in 3.0
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import re
import sys

EPHEMERAL_INDICATORS = [ "_EDH_", "_DHE_", "_ECDHE_" ]
BAD_STUFF = [ "_DES_40_", "MD5", "_RC4_", "_DES_64_",
              "_SEED_", "_CAMELLIA_", "_NULL",
              "_CCM_8", "_DES_", ]

# these never get #ifdeffed.
MANDATORY = [
    "TLS1_TXT_DHE_RSA_WITH_AES_256_SHA",
    "TLS1_TXT_DHE_RSA_WITH_AES_128_SHA",
]

def find_ciphers(filename):
    with open(filename) as f:
        for line in f:
            m = re.search(r'(?:SSL3|TLS1)_TXT_\w+', line)
            if m:
                yield m.group(0)

def usable_cipher(ciph):
    ephemeral = False
    for e in EPHEMERAL_INDICATORS:
        if e in ciph:
            ephemeral = True
    if not ephemeral:
        return False

    if "_RSA_" not in ciph:
        return False

    for b in BAD_STUFF:
        if b in ciph:
            return False
    return True

# All fields we sort on, in order of priority.
FIELDS = [ 'cipher', 'fwsec', 'mode', 'digest', 'bitlength' ]
# Map from sorted fields to recognized value in descending order of goodness
FIELD_VALS = { 'cipher' : [ 'AES', 'CHACHA20' ],
               'fwsec' : [ 'ECDHE', 'DHE' ],
               'mode' : [ 'POLY1305', 'GCM', 'CCM', 'CBC', ],
               'digest' : [ 'n/a', 'SHA384', 'SHA256', 'SHA', ],
               'bitlength' : [ '256', '128', '192' ],
}

class Ciphersuite(object):
    def __init__(self, name, fwsec, cipher, bitlength, mode, digest):
        if fwsec == 'EDH':
            fwsec = 'DHE'

        if mode in [ '_CBC3', '_CBC', '' ]:
            mode = 'CBC'
        elif mode == '_GCM':
            mode = 'GCM'

        self.name = name
        self.fwsec = fwsec
        self.cipher = cipher
        self.bitlength = bitlength
        self.mode = mode
        self.digest = digest

        for f in FIELDS:
            assert(getattr(self, f) in FIELD_VALS[f])

    def sort_key(self):
        return tuple(FIELD_VALS[f].index(getattr(self,f)) for f in FIELDS)


def parse_cipher(ciph):
    m = re.match('(?:TLS1|SSL3)_TXT_(EDH|DHE|ECDHE)_RSA(?:_WITH)?_(AES|DES)_(256|128|192)(|_CBC|_CBC3|_GCM)_(SHA|SHA256|SHA384)$', ciph)

    if m:
        fwsec, cipher, bits, mode, digest = m.groups()
        return Ciphersuite(ciph, fwsec, cipher, bits, mode, digest)

    m = re.match('(?:TLS1|SSL3)_TXT_(EDH|DHE|ECDHE)_RSA(?:_WITH)?_(AES|DES)_(256|128|192)_CCM', ciph)
    if m:
        fwsec, cipher, bits = m.groups()
        return Ciphersuite(ciph, fwsec, cipher, bits, "CCM", "n/a")

    m = re.match('(?:TLS1|SSL3)_TXT_(EDH|DHE|ECDHE)_RSA(?:_WITH)?_CHACHA20_POLY1305', ciph)
    if m:
        fwsec, = m.groups()
        return Ciphersuite(ciph, fwsec, "CHACHA20", "256", "POLY1305", "n/a")

    print("/* Couldn't parse %s ! */"%ciph)
    return None


ALL_CIPHERS = []

for fname in sys.argv[1:]:
    for c in find_ciphers(fname):
        if usable_cipher(c):
            parsed = parse_cipher(c)
            if parsed != None:
                ALL_CIPHERS.append(parsed)

ALL_CIPHERS.sort(key=Ciphersuite.sort_key)

indent = " "*7

for c in ALL_CIPHERS:
    if c is ALL_CIPHERS[-1]:
        colon = ''
    else:
        colon = ' ":"'

    if c.name in MANDATORY:
        print("%s/* Required */"%indent)
        print('%s%s%s'%(indent,c.name,colon))
    else:
        print("#ifdef %s"%c.name)
        print('%s%s%s'%(indent,c.name,colon))
        print("#endif")

print('%s;'%indent)
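Per the header comment, the script is run over an OpenSSL include directory and its stdout is then pasted into the server cipher list. The output filename below is a placeholder, not a path taken from this diff:

    # regenerate the server ciphersuite preference list (illustrative)
    python scripts/codegen/gen_server_ciphers.py /usr/include/openssl/*.h > server_ciphers.inc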
@ -1,218 +0,0 @@
#!/usr/bin/env python
# coding=utf-8
# Copyright 2011-2019, The Tor Project, Inc
# original version by Arturo Filastò
# See LICENSE for licensing information

# This script parses Firefox and OpenSSL sources, and uses this information
# to generate a ciphers.inc file.
#
# It takes two arguments: the location of a firefox source directory, and the
# location of an openssl source directory.

# Future imports for Python 2.7, mandatory in 3.0
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import os
import re
import sys

if len(sys.argv) != 3:
    print("Syntax: get_mozilla_ciphers.py <firefox-source-dir> <openssl-source-dir>", file=sys.stderr)
    sys.exit(1)

ff_root = sys.argv[1]
ossl_root = sys.argv[2]

def ff(s):
    return os.path.join(ff_root, s)
def ossl(s):
    return os.path.join(ossl_root, s)

#####
# Read the cpp file to understand what Ciphers map to what name :
# Make "ciphers" a map from name used in the javascript to a cipher macro name
fileA = open(ff('security/manager/ssl/nsNSSComponent.cpp'),'r')

# The input format is a file containing exactly one section of the form:
#   static CipherPref CipherPrefs[] = {
#    {"name", MACRO_NAME}, // comment
#    ...
#    {NULL, 0}
#   }

inCipherSection = False
cipherLines = []
for line in fileA:
    if line.startswith('static const CipherPref sCipherPrefs[]'):
        # Get the starting boundary of the Cipher Preferences
        inCipherSection = True
    elif inCipherSection:
        line = line.strip()
        if line.startswith('{ nullptr, 0}'):
            # At the ending boundary of the Cipher Prefs
            break
        else:
            cipherLines.append(line)
fileA.close()

# Parse the lines and put them into a dict
ciphers = {}
cipher_pref = {}
key_pending = None
for line in cipherLines:
    m = re.search(r'^{\s*\"([^\"]+)\",\s*(\S+)\s*(?:,\s*(true|false))?\s*}', line)
    if m:
        assert not key_pending
        key,value,enabled = m.groups()
        if enabled == 'true':
            ciphers[key] = value
            cipher_pref[value] = key
        continue
    m = re.search(r'^{\s*\"([^\"]+)\",', line)
    if m:
        assert not key_pending
        key_pending = m.group(1)
        continue
    m = re.search(r'^\s*(\S+)(?:,\s*(true|false))+\s*}', line)
    if m:
        assert key_pending
        key = key_pending
        value,enabled = m.groups()
        key_pending = None
        if enabled == 'true':
            ciphers[key] = value
            cipher_pref[value] = key

####
# Now find the correct order for the ciphers
fileC = open(ff('security/nss/lib/ssl/ssl3con.c'), 'r')
firefox_ciphers = []
inEnum=False
for line in fileC:
    if not inEnum:
        if "ssl3CipherSuiteCfg cipherSuites[" in line:
            inEnum = True
        continue

    if line.startswith("};"):
        break

    m = re.match(r'^\s*\{\s*([A-Z_0-9]+),', line)
    if m:
        firefox_ciphers.append(m.group(1))

fileC.close()

#####
# Read the JS file to understand what ciphers are enabled. The format is
#   pref("name", true/false);
# Build a map enabled_ciphers from javascript name to "true" or "false",
# and an (unordered!) list of the macro names for those ciphers that are
# enabled.
fileB = open(ff('netwerk/base/security-prefs.js'), 'r')

enabled_ciphers = {}
for line in fileB:
    m = re.match(r'pref\(\"([^\"]+)\"\s*,\s*(\S*)\s*\)', line)
    if not m:
        continue
    key, val = m.groups()
    if key.startswith("security.ssl3"):
        enabled_ciphers[key] = val
fileB.close()

used_ciphers = []
for k, v in enabled_ciphers.items():
    if v == "true":
        used_ciphers.append(ciphers[k])

#oSSLinclude = ('/usr/include/openssl/ssl3.h', '/usr/include/openssl/ssl.h',
#               '/usr/include/openssl/ssl2.h', '/usr/include/openssl/ssl23.h',
#               '/usr/include/openssl/tls1.h')
oSSLinclude = ['ssl3.h', 'ssl.h',
               'ssl2.h', 'ssl23.h',
               'tls1.h']

#####
# This reads the hex code for the ciphers that are used by firefox.
# sslProtoD is set to a map from macro name to macro value in sslproto.h;
# cipher_codes is set to an (unordered!) list of these hex values.
sslProto = open(ff('security/nss/lib/ssl/sslproto.h'), 'r')
sslProtoD = {}

for line in sslProto:
    m = re.match('#define\s+(\S+)\s+(\S+)', line)
    if m:
        key, value = m.groups()
        sslProtoD[key] = value
sslProto.close()

cipher_codes = []
for x in used_ciphers:
    cipher_codes.append(sslProtoD[x].lower())

####
# Now read through all the openssl include files, and try to find the openssl
# macro names for those files.
openssl_macro_by_hex = {}
all_openssl_macros = {}
for fl in oSSLinclude:
    fname = ossl("include/openssl/"+fl)
    if not os.path.exists(fname):
        continue
    fp = open(fname, 'r')
    for line in fp.readlines():
        m = re.match('# *define\s+(\S+)\s+(\S+)', line)
        if m:
            value,key = m.groups()
            if key.startswith('0x') and "_CK_" in value:
                key = key.replace('0x0300','0x').lower()
                #print "%s %s" % (key, value)
                openssl_macro_by_hex[key] = value
            all_openssl_macros[value]=key
    fp.close()

# Now generate the output.
print("""\
/* This is an include file used to define the list of ciphers clients should
 * advertise. Before including it, you should define the CIPHER and XCIPHER
 * macros.
 *
 * This file was automatically generated by get_mozilla_ciphers.py.
 */""")
# Go in order by the order in CipherPrefs
for firefox_macro in firefox_ciphers:

    try:
        js_cipher_name = cipher_pref[firefox_macro]
    except KeyError:
        # This one has no javascript preference.
        continue

    # The cipher needs to be enabled in security-prefs.js
    if enabled_ciphers.get(js_cipher_name, 'false') != 'true':
        continue

    hexval = sslProtoD[firefox_macro].lower()

    try:
        openssl_macro = openssl_macro_by_hex[hexval.lower()]
        openssl_macro = openssl_macro.replace("_CK_", "_TXT_")
        if openssl_macro not in all_openssl_macros:
            raise KeyError()
        format = {'hex':hexval, 'macro':openssl_macro, 'note':""}
    except KeyError:
        # openssl doesn't have a macro for this.
        format = {'hex':hexval, 'macro':firefox_macro,
                  'note':"/* No openssl macro found for "+hexval+" */\n"}

    res = """\
%(note)s#ifdef %(macro)s
   CIPHER(%(hex)s, %(macro)s)
#else
   XCIPHER(%(hex)s, %(macro)s)
#endif""" % format
    print(res)
Some files were not shown because too many files have changed in this diff.