
diff --git a/data/common/common.yaml b/data/common/common.yaml
index 8ff89095..bc4a4c2d 100644
--- a/data/common/common.yaml
+++ b/data/common/common.yaml
@@ -1,4126 +1,4141 @@
---
### See also public_keys.yaml for public key/cert fingerprint blocks
###
dns::local_cache: true
dns::nameservers:
- 127.0.0.1
dns::search_domains:
- internal.softwareheritage.org
- softwareheritage.org
dns::disable_local_zones:
- '168.192.in-addr.arpa.'
dns::forward_zones:
'internal.softwareheritage.org.': "%{alias('dns::local_nameservers')}"
'100.168.192.in-addr.arpa.': "%{alias('dns::local_nameservers')}"
'101.168.192.in-addr.arpa.': "%{alias('dns::local_nameservers')}"
'200.168.192.in-addr.arpa.': "%{alias('dns::local_nameservers')}"
'internal.staging.swh.network.': "%{alias('dns::local_nameservers')}"
'internal.admin.swh.network.': "%{alias('dns::local_nameservers')}"
'130.168.192.in-addr.arpa.': "%{alias('dns::local_nameservers')}"
'50.168.192.in-addr.arpa.': "%{alias('dns::local_nameservers')}"
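# Note on the %{alias('dns::local_nameservers')} interpolations above: hiera's
# alias() keeps the referenced value's data type (here an array of resolvers)
# instead of flattening it to a string, which a plain %{lookup(...)} would do;
# that is why alias() rather than lookup() is used for these list-valued keys.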
# dns::forwarders per-subnet. No default value
# dns::local_nameservers per-subnet. No default value
# ntp::servers per-subnet. Default value:
ntp::servers:
- 0.debian.pool.ntp.org
- 1.debian.pool.ntp.org
- 2.debian.pool.ntp.org
- 3.debian.pool.ntp.org
sudo::configs: {}
# smtp::relay_hostname is per-subnet. Default value:
smtp::relay_hostname: 'pergamon.internal.softwareheritage.org'
smtp::relayhost: "[%{lookup('smtp::relay_hostname')}]"
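# The square brackets around the relay hostname are standard Postfix relayhost
# syntax: they make Postfix connect to that host directly instead of doing an
# MX lookup on it. A subnet that needs a different relay only has to override
# the hostname, e.g. (hypothetical value):
#   smtp::relay_hostname: 'relay0.internal.staging.swh.network'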
smtp::mydestination:
- "%{::fqdn}"
smtp::mynetworks:
- 127.0.0.0/8
- "[::ffff:127.0.0.0]/104"
- "[::1]/128"
smtp::relay_destinations: []
smtp::virtual_aliases: []
smtp::extra_aliases_files: []
smtp::mail_aliases:
- user: anlambert
aliases:
- antoine.lambert33@gmail.com
- user: ardumont
aliases:
- antoine.romain.dumont@gmail.com
- user: aeviso
aliases:
- aeviso@softwareheritage.org
- user: ddouard
aliases:
- david.douard@sdfa3.org
- user: olasd
aliases:
- nicolas+swhinfra@dandrimont.eu
- user: morane
aliases:
- morane.gg@gmail.com
- user: postgres
aliases:
- root
- user: rdicosmo
aliases:
- roberto@dicosmo.org
- user: root
aliases:
- olasd
- ardumont
- ddouard
- vsellier
- user: seirl
aliases:
- antoine.pietri1@gmail.com
- user: swhstorage
aliases:
- root
- user: swhworker
aliases:
- zack
- olasd
- ardumont
- vsellier
- user: swhdeposit
aliases:
- ardumont
- vsellier
- user: zack
aliases:
- zack@upsilon.cc
- user: vlorentz
aliases:
- valentin.lorentz@inria.fr
- user: haltode
aliases:
- haltode@gmail.com
- user: danseraf
aliases:
- me@danieleserafini.eu
- user: vsellier
aliases:
- vincent.sellier@gmail.com
- user: tenma
aliases:
- nicolas.gattolin@softwareheritage.org
- user: tg1999
aliases:
- tushar.goel.dav@gmail.com
- user: hakb
aliases:
- hakim.baaloudj@gmail.com
- user: jayesh
aliases:
- jayesh.mail@gmail.com
- user: zaboukha
aliases:
- zainab.ab.k7@gmail.com
- user: bchauvet
aliases:
- bchauvet@softwareheritage.org
networks::manage_interfaces: true
networks::private_routes:
vpn:
network: 192.168.101.0/24
gateway: "%{alias('networks::private_gateway')}"
enabled: true
azure:
network: 192.168.200.0/21
gateway: "%{alias('networks::private_gateway')}"
enabled: true
staging:
network: 192.168.130.0/24
gateway: "%{alias('networks::private_gateway')}"
enabled: false
admin:
network: 192.168.50.0/24
gateway: "%{alias('networks::private_gateway')}"
enabled: false
networks::private_network: 192.168.100.0/24
networks::private_gateway: 192.168.100.1
networks::public_swh_gateway: "%{alias('networks::staging_gateway')}"
locales::default_locale: C.UTF-8
locales::installed_locales:
- C.UTF-8 UTF-8
- en_US.UTF-8 UTF-8
- fr_FR.UTF-8 UTF-8
- it_IT.UTF-8 UTF-8
timezone: Etc/UTC
packages:
- acl
- curl
- dstat
- etckeeper
- fish
- git
- htop
- iotop
- ipython3
- molly-guard
- moreutils
- ncdu
- netcat-openbsd
- nfs-common
- python3
- python3-yaml
- ruby-filesystem
- strace
- tcpdump
- tmux
- vim
- zsh
- zstd
packages::desktop:
- autojump
- chromium
- emacs
- ethtool
- gnome
- i3
- ii
- libx11-dev
- mosh
- myrepos
- net-tools
- ruby-dev
- rxvt-unicode-256color
- screen
- scrot
- tree
- vim-nox
- weechat
- weechat-scripts
packages::devel:
- arcanist
- elpa-magit
- git-email
- gitg
- gitk
- ltrace
- perl-doc
packages::devel::debian:
- devscripts
- dpkg-dev
- reprepro
- sbuild
packages::devel::python:
- graphviz
- make
- python3-arrow
- python3-azure-storage
- python3-blinker
- python3-celery
- python3-cffi
- python3-click
- python3-dateutil
- python3-dev
- python3-dulwich
- python3-flake8
- python3-flask
- python3-flask-api
- python3-flask-limiter
- python3-flask-testing
- python3-libcloud
- python3-msgpack
- python3-nose
- python3-psycopg2
- python3-pygit2
- python3-requests
- python3-retrying
- python3-sphinx
- python3-subvertpy
- python3-vcversioner
- python3-venv
- python3-wheel
packages::devel::broker:
- rabbitmq-server
packages::devel::postgres:
- apgdiff
- barman
- check-postgres
- libpq-dev
- postgresql
- postgresql-autodoc
- postgresql-client
- postgresql-contrib
- postgresql-doc
- postgresql-plpython3-11
users:
root:
uid: 0
full_name:
shell: /bin/bash
groups: []
authorized_keys:
root@louvre:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABAQDMLEWHlUQldlvZs5rg0y42lRNAfOhD+6pmO8a73DzpJWHTqvAlfteLpU78IPjSacB4dO5ish1E/1RX/HC+Bt8p2v4RBqbCnVLx2w+Hx4ahWu6qbeTVmTz+U++1SQrHnL08fSlhT0OekCw0lRZM2sQq21FZi6+vul97Ecikag4Xaw6Qfumylu94pM3t05uzTUlKk1+6VMCjhT8dlSe8VS8OirVQpE/OqYtTMAWtQaMXGHPCsqDdYRAKzkJ8GjH7ydZmX5VCRyqS0RvPKAlcJfLCs5HBtv0u5rbeGtiHhuzhj/j3YgS/6NJOC2mUfcetcDOMPLnhkKpnF0vUAzTsJ7aR
root@banco:
type: ssh-ed25519
key: AAAAC3NzaC1lZDI1NTE5AAAAIDcljv9eR52wJsu9yYan6/riIQw70lQuyz+Qt0XpGXMs
zack:
uid: 1000
full_name: Stefano Zacchiroli
shell: /usr/bin/zsh
groups:
- adm
- swhdev
- swhstorage
- swhscheduler
- swhdeploy
- sudo
- gitorious
- swhteam
authorized_keys:
zack-software-heritage:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAACAQDU0O8tkUqtQCelLEatOGfGpx1sIwHPSMA+7OdXoZjZG5pT9Sfgf3ITsNgo1iYWge5bpH/TKhhvf20B05fa8cCEE5ULaD+xdV9eTIvBEaCiP36HH33WNl/UV8T8klTG2sqBXUgLMJuinfGkuRJ977ndm7mjNwzl3Ghf6JwKfpHrvob4GLc0hm54yzcnNEzQZLcdxmOCWdwTINKnL+W/DDM8NR3vNF6T5+xaiLJzsS0IGcTubklugD3m05qbswS/uACWys3FzRM8tttw/0wCRrC9SCSKoDLonab5y3Ld6vCj1k12J2RAHSqJYwVCm70JRPWZcmU67Udi6kbqkJMftp04K0pplu8V7RLPrpwLyH4sPx7Kkhslvxqj0rerLPOkoDkqneFgxNoMcxN5ayod7fBJAq5jQUmGozeTtgPLKybnxRDhsYpkEH9paZroQ3CqDsA0dptOpedVpcQUSbiLMaYd8kgCPkVIdKANnTGGXDcTfWv21IvFx6sKm1kld2Me3ExVMq7JFcmXutF/IQom9F4vj/xd/7Lt4KmqZKyiAq4n5iaPIRUbZvmwd2D6umOHpMGlqKwtsiWRUYnAVvhRfuSZmgrGgliYiYr+vU2xeWe+XXQhP9vt3eItmdSp/8/+a2lqaIE9slE75hEI2n8in7DeSn6QhFDbyUKwZz5OwK7QVw==
olasd:
uid: 1001
full_name: Nicolas Dandrimont
shell: /bin/bash
groups:
- adm
- swhdev
- swhstorage
- swhscheduler
- swhdeploy
- sudo
- gitorious
- swhteam
authorized_keys:
nicolasd@darboux:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABAQDZ1TCpfzrvxLhEMhxjbxqPDCwY0nazIr1cyIbhGD2bUdAbZqVMdNtr7MeDnlLIKrIPJWuvltauvLNkYU0iLc1jMntdBCBM3hgXjmTyDtc8XvXseeBp5tDqccYNR/cnDUuweNcL5tfeu5kzaAg3DFi5Dsncs5hQK5KQ8CPKWcacPjEk4ir9gdFrtKG1rZmg/wi7YbfxrJYWzb171hdV13gSgyXdsG5UAFsNyxsKSztulcLKxvbmDgYbzytr38FK2udRk7WuqPbtEAW1zV4yrBXBSB/uw8EAMi+wwvLTwyUcEl4u0CTlhREljUx8LhYrsQUCrBcmoPAmlnLCD5Q9XrGH
mirzakhani-ed25519:
type: ssh-ed25519
key: AAAAC3NzaC1lZDI1NTE5AAAAID/qk9xQq6KpRbRjMVpMzPM7unmGnJp+i6oQ3a0NA65k
ardumont:
uid: 1003
full_name: Antoine R. Dumont
shell: /usr/bin/zsh
groups:
- adm
- swhdev
- swhstorage
- swhscheduler
- swhdeploy
- sudo
- gitorious
- swhteam
authorized_keys:
eniotna.t@gmail.com:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABAQDZarzgHrzUYspvrgSI6fszrALo92BDys7QOkJgUfZa9t9m4g7dUANNtwBiqIbqijAQPmB1zKgG6QTZC5rJkRy6KqXCW/+Qeedw/FWIbuI7jOD5WxnglbEQgvPkkB8kf1xIF7icRfWcQmK2je/3sFd9yS4/+jftNMPPXkBCxYm74onMenyllA1akA8FLyujLu6MNA1D8iLLXvz6pBDTT4GZ5/bm3vSE6Go8Xbuyu4SCtYZSHaHC2lXZ6Hhi6dbli4d3OwkUWz+YhFGaEra5Fx45Iig4UCL6kXPkvL/oSc9KGerpT//Xj9qz1K7p/IrBS8+eA4X69bHYYV0UZKDADZSn
ardumont@louvre:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABAQC0Xj8nwGWTb6VGFNIrlhVTLX6VFTlvpirjdgOTOz8riRxBTS9ra35g3cz8zfDl0iVyE455GXzxlm33w/uu3DX0jQOIzkcoEBRw+T33EK89lo6tCCd9xQrteWCTNR1ZBFloHSnYk2m7kw9kyrisziyAdULsCrXmMd3BH1oJyEpISA+sv/dtVpIOWdEQmkbLmdHl2uEdjBLjqb3BtAp2oJZMmppE5YjAx0Aa1+7uSnURf7NnwMx+0wTDMdfqn8z4wqI8eQny+B+bqLH9kY++52FfMVALuErGh5+75/vtd2xzRQamjKsBlTGjFFbMRagZiVNLDX2wtdudhNmnQDIKA+rH
ardumont@yavin4:
type: ssh-ed25519
key: AAAAC3NzaC1lZDI1NTE5AAAAIPsJCCMKJEXEEsHyoFOrydDmXxL8B4yhzrE8PcDVtCjr
swhworker:
uid: 1004
full_name: SWH Worker Account
shell: /bin/bash
groups:
- swhdeploy
- gitorious
swhstorage:
uid: 1005
full_name: SWH Storage Account
shell: /bin/bash
groups:
- swhdeploy
- swhstorage
swhwebapp:
uid: 1006
full_name: SWH Web App Account
shell: /bin/bash
groups: []
swhbackup:
uid: 1007
full_name: SWH Backup Account
shell: /bin/bash
groups: []
rdicosmo:
uid: 1008
full_name: Roberto Di Cosmo
shell: /bin/bash
groups:
- swhteam
authorized_keys:
dicosmo@voyager:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAACAQC5aS/3Cps2Ru9EW+nIF9Z9o6/xq1thwtCgpIjSPgcrm2BVisj6xbD5OOapS3U6BpLKjWZG8sMGBCsJJ3S1cP0s2I+xHFToqCcbfOxIe/tq/UgTtxGJ0+TfUKNoD+QJjIKnjyC+HVEQm5bSm8mJv0vptj4On8yNopytSGuLcFHHnMB2t+IOkHnTW7n3emhh3SZKAcpI1h7WvPqsqBobMFDMeqvGeHaH2AM2OSoUi7AY+MmcVL0Je6QtJqpz60QI5dvaM4AsobC12AZSJKXnuqQTY6nJy4r9jPRK8RUqo5PuAAsNtlxf5xA4s1LrDR5PxBDpYz47Pq2LHtI9Hgf/SFB3IqZeBKqquMI1xThRBwP307/vOtTiwJr4ZKcpOH+SbU7Tnde4n8siM719QZM8VITtrbwm/VBiEwvhGC/23npX4S55W7Et/l9gmeP3Q+lSw50vBuQhBSn7BzedPM1CqbTN/zqM8TCDUtPVIo+6b2s5ao/Vcq9vBXm5bP0xZeNsqsCl05zpCShudKpT6AlMGAaRTd6NUHHsf4D1JjNx3v42R3vQr6OgHELVMGECuyPs3zWHOS/P6AdD0yJTSOMaklRh2HGN8uj0+aQ7RhnrkYqRfhN+6UkrTANuxdb44AGdLmBAKIYglVrAJe+DEji/LzJdZ22baAWg4ar/WikpFJtxkw==
swhteamannex:
uid: 1009
full_name: SWH Team Git Annex Account
shell: /bin/bash
groups:
- swhteam
authorized_keys:
swhteamannex@louvre:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAACAQDL/Ef9kktq/QkJ0lohan8ObQ3o7hMf7EOQPjO+u7UhIMjBNufJnaftQRGIA6N1/wEsDvxxNNz75/oJECJHgZs2OOTJJPsKfYeybmSBocSa/sn6IKK7/b/qlwHJlSGWPGVgbtfP0KexlSAKAmKZuJyqjES5igTLV5w4wTjvUUen9QyefuUehnCX3MJhTpoyixp7icXE80aNMaCPwHZppKb/28sNlPX3MbSONGM45wSFRXNuj0mAAjrgojkhAqFNnI9oKNAt9mDcw1hV0n86VvrDhEbMCJP/z58ecn376BgyXQ8zNUPIr2g0SrSPUNjfxZHfJ0XYpF7624wOMOmZE3fsQzZh+FeMF0IFRPvcG34RaelV9dXiy+/q45oqwbMF464gFSbyt++4jpgqHdsARM4zR//jBhyLvFXR+GaKC3hFENno5W5Raff4XE5rzN/q9jVJBNfvfuEPWrapyM3A/ePeuK3SyNJwyIx+bOEQXsRdxEWKszTeJO2SLPWtCrKrC+G4/HktQSQOj5S9a+N6HoKD8E889eBEYoeZGPIuzMot4cuUlyPt3P99z4oRIaeC6XwUCvZCD2DaTAkQWQMsmOn+soaeZ1zBHbsCBbV0mBMRx7K4Vjs62vhSelryQAXW+cBgd6+f5XBjOnNhHQhsNsDfYP4Kmztn58faQV2TzGG5ow==
swhscheduler:
uid: 1010
full_name: SWH Scheduler Account
shell: /bin/bash
groups:
- swhscheduler
jbertran:
uid: 2001
full_name: Jordi Bertran de Balanda
shell: /bin/false
groups: []
password: "!"
qcampos:
uid: 2002
full_name: Quentin Campos
shell: /bin/false
groups: []
password: "!"
gitorious:
uid: 5000
full_name: Gitorious System User
shell: /bin/false
groups:
- gitorious
fiendish:
uid: 1011
full_name: Avi Kelman
shell: /bin/false
groups: []
password: "!"
morane:
uid: 1012
full_name: Morane Otilia Gruenpeter
shell: /bin/bash
groups:
- swhdev
- swhstorage
- swhteam
authorized_keys:
morane.gg@gmail.com:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABAQDm8kH1pP+4ENKmpkTCkL2ashxxnOFVndGrcvfX05lV1hOo2NdItpdoR9txIgFEs3d7v73mtH4nWciUyaK7FIByrtvsR2TIhdVgEcb0Xai8viV+sDMTndpiNlWNilbfxm0K70tgpG4BeSWRJy8cPxnCR9CWoB2Vo9Df7lDKz1LXDgfY4VLJd69ahf1DPFUDjpWIEQdPFX2ZyGUYM+0yPXIoyYW/qreDt1JkYZXXVbRAV8j44/TVgTRYJLgYb9ThW6WzlGM1S4uP7GQdAuROCcspqW3ahV/UmV4Z9SM6S34NN182KvM0Ve7uxAPQz+IdWOgZTK0pvd+hfjHKbLSTA6I3
seirl:
uid: 1013
full_name: Antoine Pietri
shell: /usr/bin/zsh
groups:
- swhdev
- swhstorage
- swhteam
- swhdeploy
authorized_keys:
seirl:
type: ssh-ed25519
key: AAAAC3NzaC1lZDI1NTE5AAAAILiua8eEg+nU0XSbYPTgnOMftzvpbN+u7v5jDabeO/0E
ssushant:
uid: 1014
full_name: Sushant
shell: /bin/false
groups: []
password: "!"
anlambert:
uid: 1015
full_name: Antoine Lambert
shell: /bin/bash
groups:
- swhdev
- swhstorage
- swhteam
- swhdeploy
- swhwebapp
authorized_keys:
antoine.lambert@inria.fr:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAACAQDLWPcZnSUszEedMa39dT3ZCHpRod3NTs6WT4OfMMRVahrhTtWYdSiNGy8U3kEQveTZvMrb9WLtLPB3K8o7Xrf8WCI8iTOl9eb9DVjE9XL+zS0ZAcEmoZ5YH8e3gEDoDm8ZrMxF+V5XSlvhNi6kbWzJdqhXu++bJHHqGrKUHeTCQCfpYYMrsnvhPjtxe+90BK7e+IGm1Ha8LZMCCmOtz0XggxD8d2mFBaP2p8v9xsM48KfwFvsRMb3TZIaO/+NcsRSTe7wfFAR1pb14pi5LZAHeb2tpWfVH2vQGaE7Rej+Ycf4UOeaRmFGpimw7u7fugvDvKfZ/vs7w7Qs2RtxNdqJf9JM+vvi78OQbloufot1Tz2r19aDbhM9nsCn+Uo3rNfkmD+UcSMKrRJCMEXVBbaY/bgzs7XoqCJ8ODE2U/dF3NtHBZr+CB52iilUtemXy+Xwqw4TSs/r9vW7/XueTdb0Yp/cUs5uLCqCwlMpGS5okorpdJextp5gRuN6EMlUo6PffRiz5T0CqKm1xJu0NeT0EaacAXoGTDQaS4pIQGglqWfAOmjej9dM8gxAF6rgrx70uJt6Hy18tvzdB5iwJ4F2LUjcZhFnrxjUDzhjPoDBiRtPNgEKrCc30OHsveqXwMPo3v/d3np1Vpkum0JEwmp83q92P5T2rbf+wiruxZhhtww==
grouss:
uid: 1016
full_name: Guillaume Rousseau
shell: /bin/false
groups:
- swhteam
authorized_keys:
guillaume.rousseau@univ-paris-diderot.fr:
type: ssh-rsa
key: disabled
ftigeot:
uid: 1017
full_name: Francois Tigeot
shell: /bin/false
password: "!"
groups: []
swhdeposit:
uid: 1018
full_name: SWH Deposit App Account
shell: /bin/bash
groups:
- swhscheduler
swhvault:
uid: 1019
full_name: SWH Vault Account
shell: /bin/bash
groups:
- swhdeploy
- swhstorage
- swhvault
ddouard:
uid: 1020
full_name: David Douard
shell: /bin/bash
groups:
- adm
- sudo
- swhdev
- swhteam
- swhscheduler
authorized_keys:
david.douard@sdfa3.org:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAACAQCoON7De2Bx03owpZfzbOyucZTmyQdm7F+LP4D4H9EyOFxtyMpjH2S9Ve/JvMoFIWGQQlXSkYzRv63Z0BzPLKD2NsYgomcjOLdw1Baxnv8VOH+Q01g4B3cabcP2LMVjerHt/KRkY3E6dnKLQGE5UiER/taQ7KazAwvu89nUd4BJsV43rJ3X3DtFEfH3lR4ZEIgFyPUkVemQAjBhueFmN3w8debOdr7t9cBpnYvYKzLQN+G/kQVFc+fgs+fFOtOv+Az9kTXChfLs5pKPBm+MuGxz4gS3fPiAjY9cN6vGzr7ZNkCRUSUjJ10Hlm7Gf2EN8f+k6iSR4CPeixDcZ+scbCg4dCORqTsliSQzUORIJED9fbUR6bBjF4rRwm5GvnXx5ZTToWDJu0PSHYOkomqffp30wqvAvs6gLb+bG1daYsOLp+wYru3q09J9zUAA8vNXoWYaERFxgwsmsf57t8+JevUuePJGUC45asHjQh/ON1H5PDXtULmeD1GKkjqyaS7SBNbpOWgQb21l3pwhLet3Mq3TJmxVqzGMDnYvQMUCkiPdZq2pDplzfpDpOKLaDg8q82rR5+/tAfB4P2Z9RCOqnMLRcQk9AluTyO1D472Mkp+v5VA4di0eTWZ0tuzwYJEft0OVo+QOVTslCGsyGiEUoOcHzkrdgsT5uQziyAfgTMSuiw==
vlorentz:
uid: 1021
full_name: Valentin Lorentz
shell: /usr/bin/zsh
groups:
- swhdev
- swhteam
authorized_keys:
vlorentz@pro-desktop:
type: ssh-ed25519
key: AAAAC3NzaC1lZDI1NTE5AAAAILsRMQjrrfUjX1ka9e6YlyMyDvTC+qk5a21Fp9yXYI7p
vlorentz@perso-laptop:
type: ssh-ed25519
key: AAAAC3NzaC1lZDI1NTE5AAAAIIjJoY4XBTTNsxLVF/sUKBI4WGR2AIiR9qfMdspnsRfJ
vlorentz@pro-laptop:
type: ssh-ed25519
key: AAAAC3NzaC1lZDI1NTE5AAAAIND42yBr8C/zKJiQx2I84yIlMNsS9IMTUkdb9GjYgFON
haltode:
uid: 1022
full_name: Thibault Allancon
shell: /usr/bin/zsh
groups:
- swhdev
- swhteam
authorized_keys:
haltode@gmail.com:
type: ssh-ed25519
key: AAAAC3NzaC1lZDI1NTE5AAAAIORGwY56PpvgwMWqDei718PPriV6U7LL5JMPJWS7zTcg
danseraf:
uid: 1023
full_name: Daniel Serafini
groups:
- swhdev
shell: /usr/bin/fish
authorized_keys:
me@danieleserafini.eu:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABgQDsQ1QxD/xHOOhCAarH9o3oAfT7YCwxXCAVaazxC1ZMygWZUE95oMu6D2Wib5Q4GBKD35ddclY/l5GsYB5uXbl1UxAFUPe4COqTEt+7deMSWakv46ceb5oHxOkMAF4w400FV1Pi1usk2TpArarOPuxN7yKu54sBACI5HEezn3KOvxPYt/DUAt+XdrfLsiZPyzjOYYezocCV+O1PkivhC99cXHtlwTBRntWTjlyUt9p46U6Uf2G9u88v4v2KopH0sQG7nAYNXN7W14pB925fnDYDHFYUoKDCBbJiQMMKKlQxJZLSfh/KX6kX7OIXOiMclIfmBYAUwDHU3MevdczAIj8JWspUVyRf32B1jnD+H8UnDTCwApCFytcvzDuoYCgiUVk4bDpJOHeb8V4dh6UZt0DFa4iiLsVIX8MjcqaI5TmmvaYgjGPSTLlUsasUJnVqCweQNTiZPhDiqrs258aQvSGf634k10lxmhAAB0xAGVO6Zj1mBjqS+XDJiLzf4mWwm4M=
aeviso:
uid: 1024
full_name: Andres Ezequiel Viso
groups:
- swhdev
- swhteam
shell: /bin/bash
authorized_keys:
andres@powerage:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABAQDRnVV1UimkTbAAb1FPxDPAqfyCVSrlB7a1Kq83ZKc6e7tTVmOKA55whZa3KwNBwgZGB8389ejCery+WDqSiRZsirup4UN/VZ5X3XJdnRVPPcpzVuWFivPKBMn6D4LMa9D7moMnV9JC5q9zwMNHZQ4qJbGlW44MzWOi0RnPVd28jsRkL0sMRvz+i3KXFGDNeaVPV0YLB4vS1bmEho/8Tu5NlY/Tzkter9qTNN7QNIAbnq8oQF/lLPFAytE0s61ZtoDcXfznJPXWkIljMCTQHlxeDci9DQWUgQc+A+7qWmqkb3lzDYKQrxRfZtl3hbnR3jrtSfTI+q0Nagh0O4FN2QUj
aeviso@powerage:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABAQCX0CtuTAwQpOQl3qBGm7FAuzKlnSmSUOISKPV3F8ohbYbN682VKnruTPFRwkdy5sidg6I8jVrIeJJQ+WZVrTM6mdtpiLANB4r3sgQdml4l3sWAyNm3UD5wwhsNYlnVgVuzUihK7/ykr9vKYxY3rrAYcLVyLzkq9t7BPVAMHJZj6XBsKqzhATcQxVdVVA+7k7aTSnaNR29AiHEVYb8/1J61W59mNqrqrwcWXaGXgjb4rvzprWhooQd21/9v4rlgwZznFlK20GQUoHy+BVCbpQJznQtQeCvXfTpBYGZjo7DrljbDdafkrz9fa7NqJ3UwRzYITadM6NLDPzD7EIR9q2cf
vsellier:
uid: 1025
full_name: Vincent Sellier
groups:
- swhdev
- swhteam
- sudo
shell: /usr/bin/zsh
authorized_keys:
vsellier@swh-vs1:
type: ssh-ed25519
key: AAAAC3NzaC1lZDI1NTE5AAAAII5oO0jj/IeV04osZ/iaKBZdnuqdziJD3x1VOhtK9tZY
tenma:
uid: 1026
full_name: Nicolas Gattolin
groups:
- swhdev
- swhteam
shell: /bin/false
authorized_keys:
tenma@swh-ng1:
type: ssh-ed25519
key: disabled
tg1999:
uid: 1027
full_name: Tushar Goel
groups:
- swhdev
- swhteam
shell: /bin/false
authorized_keys:
tg@tg-Inspiron-15-7000-Gaming:
type: ssh-ed25519
key: disabled
hakb:
uid: 1028
full_name: Hakim Baaloudj
shell: /bin/false
groups:
- swhdev
- swhteam
authorized_keys:
hbaaloud@po461-pro.paris.inria.fr:
type: ssh-ed25519
key: disabled
jayesh:
uid: 1029
full_name: Jayesh Velayudhan
shell: /bin/bash
groups:
- swhdev
- swhteam
authorized_keys:
jayesh@jayesh:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABAQCyBrP2uhCnQcEbNHpH6tsP/yl80JGcxtgxJ8m3Gts0keA8JTA6J1qnn9HUmzbdLpoJjCyHMxfW/oBR7G3IGEyD93S2SShaGltNMpFOEq+iAbKR2Y3ZBy8Vvq/bwBneq0dBNs/ITawbYQVMahcbOzB2I3aq+rG149t5icZ/1DPswVc9ZBPigtiReywF3EJ7uBd6JQ6OLRDnwpKZf6LNlkHzIWD5yBRLTmMNEExLcFw6zdw86t5GfgFTlGkARhUh+iXEBhPeVZnCboB9GZVcKURfrV788QUbIFzzKFm6VQt/cVAJDMUQmx/kYjFaV1ejU3f0+k3SdtVbANfQCYUsL2f7
zaboukha:
uid: 1030
full_name: Zeinab Abou Khalil
shell: /bin/bash
groups:
- swhdev
- swhteam
authorized_keys:
spirals@wifi-pro-83-138.paris.inria.fr:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABgQDocv8kwNgelVAccRs8uDDGPeEalSwH/NhUfbvnum5dlhx7aRUBfswJIRy142TWCskS5h+3EXhwXmsEUfEKSIAkQT9+FlYDxYML7NFy7ONMo0ZUQUWXbMo4PGpQuIPmGFH4AUqL7Y69gdY4p5lUTlV+8wZP6Fi91tsjBwtJqvGyMS7QmeRgY/f7LTEyxzv3jB9XN+DioxAHQDDfodTD1UZlGNSSdIU6yI2oj7pEMGLAw5ghHlI2lIJoGoWY5fR30Yuua714R5DdQZ7yoWUEWAenefmKZmDtaMaUvbDVFvgDZIZSI3Xg+75iogbPhb3tG1psS1geaUo4oPFdYr0xTQ5HySoloFw9segk9j8gUsGqgAHoyEcA3jgoA81iH6l+5tI+qXgj5BMxykAglBDfIbkOoYW39eEJM7Pv5RkXZPdbDVxypTSNGr2uS8xFb3J1ibaHyC8alluZDUkcBjAGOD9XcqYKfuM8zVpL4tOhN5igorebBFJwF03cwU2r1ccIKec=
bchauvet:
uid: 1031
full_name: Benoit Chauvet
shell: /bin/bash
groups:
- swhdev
- swhteam
authorized_keys:
ben@ben-deb:
type: ssh-ed25519
key: AAAAC3NzaC1lZDI1NTE5AAAAIJKPlIonRqTCL3ptpakag49GrY0Aw7SXpyZG2UjEhW18
lunar:
uid: 1032
full_name: Jeremy Bobbio
shell: /bin/bash
groups:
- swhdev
- swhteam
authorized_keys:
lunar@seleucus:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABgQDIGUnG2HseIZJ9deNXvGzfPU8Iey1TS89AGUGadFxnuy2mDjBepWanJUBbRArm8fv5v+LIJ2MUm3dchP1aEaMYfJp7QUeagJTYVHIrbdFYPAdGgwe5kQzOaWEyqsV3W2AyXkvgKq+V0whWeqJuRxWD+3i6rFLZJxYh1Uf+l8I4bPvD7pLqssLXm5qnzOfSJrZaiu9mtHasscTQUZ2/lZ+veF0mBkqAxmor/lWbTcENCjALgZhdJlg7kz+HFiFO+IaNdAvjayQiEmk2r/SutwA9CdHzAcLh6Jr4iiThRy4RcIB/SACcsYJbUIJkxHiqoWV6LNOICV/pncnWNQYMhyUH3pH+3Gxm3feUpHUT8IQhwb4FS4dctp225LyX+s/6rDzMZU3jKkV090BAvFCiY1EUTaiNAvU9SbjgVzFn40X1dydaucdmh9ziGV56mPfokhR+e3D2QxXNpT6A+3ra/OsTrz55SrKhKo5fJgDp/KJlmwF+wp6jjd78fBJCyMuZb3k=
groups:
adm:
gid: 4 # assigned from base-files
sudo:
gid: 27 # assigned from base-files
www-data:
gid: 33 # assigned from base-files
zack:
gid: 1000
olasd:
gid: 1001
ardumont:
gid: 1003
ddouard:
gid: 1020
swhworker:
gid: 1004
swhdev:
gid: 1002
swhstorage:
gid: 1005
swhdeploy:
gid: 1006
swhbackup:
gid: 1007
swhwebapp:
gid: 1008
swhteam:
gid: 1009
swhscheduler:
gid: 1010
gitorious:
gid: 5000
swhdeposit:
gid: 1018
swhvault:
gid: 1019
vsellier:
gid: 1025
tenma:
gid: 1026
tg1999:
gid: 1027
bchauvet:
gid: 1031
lunar:
gid: 1032
kubenfs:
gid: 42000
gunicorn::statsd::host: 127.0.0.1:8125
munin::master::hostname: munin.internal.softwareheritage.org
rabbitmq::monitoring::user: swhdev
# following password key in private data
# - rabbitmq::monitoring::password
# - swh::deploy::worker::task_broker::password
# - swh::deploy::scheduler::task_broker::password
rabbitmq::server::users:
- name: "%{hiera('rabbitmq::monitoring::user')}"
is_admin: true
password: "%{hiera('rabbitmq::monitoring::password')}"
tags: []
- name: swhconsumer
is_admin: false
password: "%{hiera('swh::deploy::worker::task_broker::password')}"
tags: []
- name: swhproducer
is_admin: false
password: "%{hiera('swh::deploy::scheduler::task_broker::password')}"
tags:
- management
puppet::master::hostname: pergamon.internal.softwareheritage.org
puppet::master::puppetdb: pergamon.internal.softwareheritage.org
puppet::master::codedir: /etc/puppet/code
puppetdb::master::config::terminus_package: puppet-terminus-puppetdb
puppet::master::manage_puppetdb: false
strict_transport_security::max_age: 15768000
php::version: '7.4'
# Those variables get picked up by 'include ::php::fpm::daemon'
php::fpm::daemon::log_owner: www-data
php::fpm::daemon::log_group: adm
php::fpm::daemon::log_dir_mode: '0750'
# Those variables get picked up by 'include ::apache'
apache::server_tokens: 'Prod'
apache::server_signature: 'Off'
apache::trace_enable: 'Off'
apache::manage_group: false
# Those variables get picked up by 'include ::apache::mod::passenger'
apache::mod::passenger::passenger_root: /usr/lib/ruby/vendor_ruby/phusion_passenger/locations.ini
# Those variables need to be set manually in the SSL vhosts.
apache::ssl_protocol: all -SSLv2 -SSLv3 -TLSv1 -TLSv1.1
apache::ssl_honorcipherorder: 'On'
apache::ssl_cipher: ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384
apache::hsts_header: "set Strict-Transport-Security \"max-age=%{hiera('strict_transport_security::max_age')}\""
# Those variables need to be set manually for all vhosts
apache::http_port: 80
apache::https_port: 443
apache::log_formats:
combined_with_duration: "%a %l %u %t \\\"%r\\\" %>s %b \\\"%{literal('%')}{Referer}i\\\" \\\"%{literal('%')}{User-Agent}i\\\" %{literal('%')}{ms}T"
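# %{literal('%')} is hiera's escape for a literal percent sign, so the format
# string reaches Apache as the usual LogFormat tokens. Expanded, the
# combined_with_duration format should look roughly like:
#   %a %l %u %t "%r" %>s %b "%{Referer}i" "%{User-Agent}i" %{ms}T
# (a sketch of the expected expansion, not copied from a rendered vhost)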
# Hitch TLS proxy configuration
hitch::frontend: "[*]:10443"
hitch::proxy_support: false
hitch::http2_support: false
# Varnish configuration
varnish::http_port: 10080
varnish::proxy_port: 6081
varnish::http2_support: false
varnish::listen:
- ":%{hiera('varnish::http_port')}"
- "[::1]:%{hiera('varnish::proxy_port')},PROXY"
varnish::backend_http_port: "%{hiera('apache::http_port')}"
varnish::admin_listen: 127.0.0.1
varnish::admin_port: 6082
varnish::storage_type: malloc
varnish::storage_size: 256m
varnish::storage_file: /var/lib/varnish/varnish_storage.bin
# varnish::secret in private-data
swh::deploy::reverse_proxy::services:
- deposit
- webapp
- objstorage
letsencrypt::account_email: sysop+letsencrypt@softwareheritage.org
letsencrypt::server: https://acme-v02.api.letsencrypt.org/directory
letsencrypt::gandi_livedns_hook::config:
gandi_api: https://dns.api.gandi.net/api/v5/
zones:
softwareheritage.org:
api_key: "%{alias('gandi::softwareheritage_org::api_key')}"
sharing_id: "%{alias('gandi::softwareheritage_org::sharing_id')}"
swh.network:
api_key: "%{alias('gandi::softwareheritage_org::api_key')}"
sharing_id: "%{alias('gandi::swh_network::sharing_id')}"
letsencrypt::gandi_paas_hook::config:
gandi_xmlrpc: https://rpc.gandi.net/xmlrpc/
zone_keys:
softwareheritage.org: "%{alias('gandi::softwareheritage_org::xmlrpc_key')}"
sentry::vhost::name: sentry.softwareheritage.org
minio::vhost::name: minio.admin.swh.network
letsencrypt::certificates::exported_directory: "%{::puppet_vardir}/letsencrypt_exports"
letsencrypt::certificates::directory: /etc/ssl/certs/letsencrypt
letsencrypt::certificates:
archive_production:
domains:
# Should match with keycloak::resources::realms.SoftwareHeritage.clients.swh-web.redirect_uris
- archive.softwareheritage.org
- base.softwareheritage.org
- archive.internal.softwareheritage.org
archive_staging:
domains:
# Should match with keycloak::resources::realms.SoftwareHeritageStaging.clients.swh-web.redirect_uris
- webapp.staging.swh.network
- webapp-rp.internal.staging.swh.network
archive_webapp1:
domains:
- webapp1.internal.softwareheritage.org
deposit_production:
domains:
- deposit.softwareheritage.org
- deposit.internal.softwareheritage.org
deposit_staging:
domains:
- deposit.staging.swh.network
- deposit-rp.internal.staging.swh.network
graphql_staging:
domains:
- graphql.staging.swh.network
- graphql-rp.internal.staging.swh.network
hedgedoc:
domains:
- hedgedoc.softwareheritage.org
objstorage_production:
domains:
- objstorage.softwareheritage.org
- objstorage.internal.softwareheritage.org
objstorage_staging:
domains:
- objstorage.staging.swh.network
- objstorage-rp.internal.staging.swh.network
stats_export:
domains:
- stats.export.softwareheritage.org
- pergamon.softwareheritage.org
jenkins:
domains:
- jenkins.softwareheritage.org
"%{lookup('sentry::vhost::name')}":
domains:
- "%{lookup('sentry::vhost::name')}"
"%{lookup('minio::vhost::name')}":
domains:
- "%{lookup('minio::vhost::name')}"
storage1.internal.staging.swh.network:
domains:
- broker1.journal.staging.swh.network
- journal1.internal.staging.swh.network
keycloak:
domains:
- auth.softwareheritage.org
kafka1.internal.softwareheritage.org:
domains:
- broker1.journal.softwareheritage.org
- kafka1.internal.softwareheritage.org
kafka2.internal.softwareheritage.org:
domains:
- broker2.journal.softwareheritage.org
- kafka2.internal.softwareheritage.org
kafka3.internal.softwareheritage.org:
domains:
- broker3.journal.softwareheritage.org
- kafka3.internal.softwareheritage.org
kafka4.internal.softwareheritage.org:
domains:
- broker4.journal.softwareheritage.org
- kafka4.internal.softwareheritage.org
kafka01.euwest.azure.internal.softwareheritage.org:
domains:
- kafka01.euwest.azure.internal.softwareheritage.org
- kafka01.euwest.azure.softwareheritage.org
kafka02.euwest.azure.internal.softwareheritage.org:
domains:
- kafka02.euwest.azure.internal.softwareheritage.org
- kafka02.euwest.azure.softwareheritage.org
kafka03.euwest.azure.internal.softwareheritage.org:
domains:
- kafka03.euwest.azure.internal.softwareheritage.org
- kafka03.euwest.azure.softwareheritage.org
kafka04.euwest.azure.internal.softwareheritage.org:
domains:
- kafka04.euwest.azure.internal.softwareheritage.org
- kafka04.euwest.azure.softwareheritage.org
kafka05.euwest.azure.internal.softwareheritage.org:
domains:
- kafka05.euwest.azure.internal.softwareheritage.org
- kafka05.euwest.azure.softwareheritage.org
kafka06.euwest.azure.internal.softwareheritage.org:
domains:
- kafka06.euwest.azure.internal.softwareheritage.org
- kafka06.euwest.azure.softwareheritage.org
annex.softwareheritage.org:
domains:
- annex.softwareheritage.org
bitbucket-archive.softwareheritage.org:
domains:
- bitbucket-archive.softwareheritage.org
debian.softwareheritage.org:
domains:
- debian.softwareheritage.org
- debian.internal.softwareheritage.org
docs.softwareheritage.org:
domains:
- docs.softwareheritage.org
intranet.softwareheritage.org:
domains:
- intranet.softwareheritage.org
wiki.softwareheritage.org:
domains:
- wiki.softwareheritage.org
icinga.softwareheritage.org:
domains:
- icinga.softwareheritage.org
- icinga.internal.softwareheritage.org
wg.softwareheritage.org:
domains:
- wg.softwareheritage.org
git.softwareheritage.org:
domains:
- git.softwareheritage.org
forge.softwareheritage.org:
domains:
- forge.softwareheritage.org
grafana.softwareheritage.org:
domains:
- grafana.softwareheritage.org
www-dev:
domains:
- www-dev.softwareheritage.org
deploy_hook: gandi_paas
www:
domains:
- softwareheritage.org
- www.softwareheritage.org
deploy_hook: gandi_paas
gandi-redirects:
domains:
- softwareheritage.org
- sponsors.softwareheritage.org
- sponsorship.softwareheritage.org
- testimonials.softwareheritage.org
deploy_hook: gandi_paas
netbox:
domains:
- inventory.internal.admin.swh.network
# legacy hostname, needed for the redirect
inventory.internal.softwareheritage.org:
domains:
- inventory.internal.softwareheritage.org
azure-billing.internal.admin.swh.network:
domains:
- azure-billing.internal.admin.swh.network
maven-exporter.internal.staging.swh.network:
domains:
- maven-exporter.internal.staging.swh.network
maven-exporter.internal.softwareheritage.org:
domains:
- maven-exporter.internal.softwareheritage.org
pergamon.softwareheritage.org:
domains:
- pergamon.softwareheritage.org
- pergamon.internal.softwareheritage.org
thanos.internal.admin.swh.network:
domains:
- thanos.internal.admin.swh.network
bind::update_key: local-update
bind::zones:
internal.softwareheritage.org:
domain: internal.softwareheritage.org
100.168.192.in-addr.arpa:
domain: 100.168.192.in-addr.arpa
101.168.192.in-addr.arpa:
domain: 101.168.192.in-addr.arpa
internal.staging.swh.network:
domain: internal.staging.swh.network
internal.admin.swh.network:
domain: internal.admin.swh.network
50.168.192.in-addr.arpa:
domain: 50.168.192.in-addr.arpa
128.168.192.in-addr.arpa:
domain: 128.168.192.in-addr.arpa
130.168.192.in-addr.arpa:
domain: 130.168.192.in-addr.arpa
200.168.192.in-addr.arpa:
domain: 200.168.192.in-addr.arpa
201.168.192.in-addr.arpa:
domain: 201.168.192.in-addr.arpa
202.168.192.in-addr.arpa:
domain: 202.168.192.in-addr.arpa
203.168.192.in-addr.arpa:
domain: 203.168.192.in-addr.arpa
204.168.192.in-addr.arpa:
domain: 204.168.192.in-addr.arpa
205.168.192.in-addr.arpa:
domain: 205.168.192.in-addr.arpa
206.168.192.in-addr.arpa:
domain: 206.168.192.in-addr.arpa
207.168.192.in-addr.arpa:
domain: 207.168.192.in-addr.arpa
# Defaults for secondary bind server
bind::zones::type: slave
bind::zones::masters:
- 192.168.100.29
bind::zones::allow_transfers:
- 192.168.100.0/24
- 192.168.101.0/24
- 192.168.200.22
bind::zones::default_data:
zone_type: "%{alias('bind::zones::type')}"
dynamic: true
masters: "%{alias('bind::zones::masters')}"
transfer_source: ''
allow_updates: []
update_policies: ''
allow_transfers: "%{alias('bind::zones::allow_transfers')}"
dnssec: false
key_directory: ''
ns_notify: true
also_notify: ''
allow_notify: ''
forwarders: ''
forward: ''
source: ''
ns_records:
- pergamon.internal.softwareheritage.org.
- ns0.euwest.azure.internal.softwareheritage.org.
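# Judging by the key names, these defaults are merged into every entry of
# bind::zones, so each zone above is served as a dynamic slave of
# 192.168.100.29 with transfers restricted to bind::zones::allow_transfers.
# An individual zone could still override a field locally, for instance
# (hypothetical override on the primary server):
#   internal.softwareheritage.org:
#     domain: internal.softwareheritage.org
#     zone_type: master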
bind::resource_records:
archive/CNAME:
type: CNAME
record: archive.internal.softwareheritage.org
data: moma.internal.softwareheritage.org.
objstorage-ro/CNAME:
type: CNAME
record: objstorage.internal.softwareheritage.org
data: moma.internal.softwareheritage.org.
db/CNAME:
type: CNAME
record: db.internal.softwareheritage.org
data: belvedere.internal.softwareheritage.org.
debian/CNAME:
type: CNAME
record: debian.internal.softwareheritage.org
data: pergamon.internal.softwareheritage.org.
backup/CNAME:
type: CNAME
record: backup.internal.softwareheritage.org
data: banco.internal.softwareheritage.org.
icinga/CNAME:
type: CNAME
record: icinga.internal.softwareheritage.org
data: pergamon.internal.softwareheritage.org.
faitout/CNAME:
type: CNAME
record: faitout.internal.softwareheritage.org
data: prado.internal.softwareheritage.org.
graph/CNAME:
type: CNAME
record: graph.internal.softwareheritage.org
data: granet.internal.softwareheritage.org.
logstash/CNAME:
type: CNAME
record: logstash.internal.softwareheritage.org
data: logstash0.internal.softwareheritage.org.
kibana/CNAME:
type: CNAME
record: kibana.internal.softwareheritage.org
data: banco.internal.softwareheritage.org.
rabbitmq/CNAME:
type: CNAME
record: rabbitmq.internal.softwareheritage.org
data: saatchi.internal.softwareheritage.org.
inventory/CNAME:
type: CNAME
record: inventory.internal.softwareheritage.org
data: bojimans.internal.admin.swh.network.
inventory-admin/CNAME:
type: CNAME
record: inventory.internal.admin.swh.network
data: bojimans.internal.admin.swh.network.
k8s-admin/A: # main ingress ip of the admin k8s cluster
record: k8s-admin.internal.admin.swh.network
data: 192.168.50.44
argocd/CNAME:
type: CNAME
record: argocd.internal.admin.swh.network
data: k8s-admin.internal.admin.swh.network.
k8s-admin-thanos/CNAME:
type: CNAME
record: k8s-admin-thanos.internal.admin.swh.network
data: k8s-admin.internal.admin.swh.network.
minio-console/CNAME:
type: CNAME
record: minio-console.internal.admin.swh.network
data: k8s-admin.internal.admin.swh.network.
# Decommissioning in progress
k8s-argocd/A: # main ingress ip of the argocd cluster
record: k8s-argocd.internal.admin.swh.network
data: 192.168.50.42
k8s-archive-production/A: # main ingress ip of the archive production cluster
record: k8s-archive-production.internal.softwareheritage.org
data: 192.168.100.119
k8s-archive-production-thanos/A: # thanos endpoint, CNAME to the archive production cluster ingress
type: CNAME
record: k8s-archive-production-thanos.internal.softwareheritage.org
data: k8s-archive-production.internal.softwareheritage.org.
+ k8s-archive-production-rke2/A:
+ record: k8s-archive-production-rke2.internal.softwareheritage.org
+ data: 192.168.100.139
+ k8s-archive-production-rke2-thanos/A:
+ type: CNAME
+ record: k8s-archive-production-rke2-thanos.internal.softwareheritage.org
+ data: k8s-archive-production-rke2.internal.softwareheritage.org.
reaper/CNAME:
type: CNAME
record: reaper.internal.softwareheritage.org
- data: k8s-archive-production.internal.softwareheritage.org.
+ data: k8s-archive-production-rke2.internal.softwareheritage.org.
k8s-archive-staging/A: # main ingress ip of the archive staging cluster
record: k8s-archive-staging.internal.staging.swh.network
data: 192.168.130.129
graphql-staging/CNAME:
type: CNAME
record: graphql.internal.staging.swh.network
data: k8s-archive-staging.internal.staging.swh.network.
k8s-archive-staging-thanos/CNAME:
type: CNAME
record: k8s-archive-staging-thanos.internal.staging.swh.network
data: k8s-archive-staging.internal.staging.swh.network.
k8s-gitlab-production/A: # internal ingress for the gitlab production cluster
record: k8s-gitlab-production.euwest.azure.internal.softwareheritage.org
data: 192.168.200.5
k8s-gitlab-production-thanos/CNAME:
type: CNAME
record: k8s-gitlab-production-thanos.euwest.azure.internal.softwareheritage.org
data: k8s-gitlab-production.euwest.azure.internal.softwareheritage.org.
k8s-gitlab-staging/A:
record: k8s-gitlab-staging.internal.staging.swh.network
data: 192.168.200.15
k8s-gitlab-staging-thanos/CNAME:
type: CNAME
record: k8s-gitlab-staging-thanos.internal.staging.swh.network
data: k8s-gitlab-staging.internal.staging.swh.network.
k8s-rancher/A:
record: k8s-rancher.euwest.azure.internal.softwareheritage.org
data: 192.168.200.19
k8s-rancher-app/CNAME:
type: CNAME
record: rancher.euwest.azure.internal.softwareheritage.org
data: k8s-rancher.euwest.azure.internal.softwareheritage.org.
k8s-rancher-thanos/CNAME:
type: CNAME
record: k8s-rancher-thanos.euwest.azure.internal.softwareheritage.org
data: k8s-rancher.euwest.azure.internal.softwareheritage.org.
glyptotek/A: # OPNSense firewall, not managed by puppet
record: "%{alias('opnsense::hosts.glyptotek.fqdn')}"
data: "%{alias('opnsense::hosts.glyptotek.ip')}"
pushkin/A: # OPNSense firewall, not managed by puppet
record: "%{alias('opnsense::hosts.pushkin.fqdn')}"
data: "%{alias('opnsense::hosts.pushkin.ip')}"
internalgw/A: # Firewall(s) VIP, not managed by puppet
record: gw.internal.softwareheritage.org
data: 192.168.100.1
staging-rp0/A:
record: rp0.internal.staging.swh.network
data: 192.168.130.20
staging-webapp/A:
record: webapp.internal.staging.swh.network
data: 192.168.130.30
staging-webapp-rp/CNAME:
type: CNAME
record: webapp-rp.internal.staging.swh.network
data: rp0.internal.staging.swh.network.
staging-deposit/A:
record: deposit.internal.staging.swh.network
data: 192.168.130.31
staging-deposit-rp/CNAME:
type: CNAME
record: deposit-rp.internal.staging.swh.network
data: rp0.internal.staging.swh.network.
journal1/CNAME:
type: CNAME
record: journal1.internal.staging.swh.network
data: storage1.internal.staging.swh.network.
admin-db1/CNAME:
type: CNAME
record: db1.internal.admin.swh.network
data: dali.internal.admin.swh.network.
azure-billing-report/CNAME:
type: CNAME
record: azure-billing.internal.admin.swh.network
data: money.internal.admin.swh.network.
maven-index-exporter/CNAME:
type: CNAME
record: maven-exporter.internal.staging.swh.network
data: maven-exporter0.internal.staging.swh.network.
# Non-puppet azure hosts
pgmirror0.euwest.azure/A:
record: pgmirror0.euwest.azure.internal.softwareheritage.org
data: 192.168.200.51
# VPN hosts
zack/A:
record: zack.internal.softwareheritage.org
data: 192.168.101.6
olasd/A:
record: olasd.internal.softwareheritage.org
data: 192.168.101.10
ardumont/A:
record: ardumont.internal.softwareheritage.org
data: 192.168.101.14
ardumont-desktop/A:
record: ardumont-desktop.internal.softwareheritage.org
data: 192.168.101.158
rdicosmo/A:
record: rdicosmo.internal.softwareheritage.org
data: 192.168.101.38
grand-palais/A:
record: grand-palais.internal.softwareheritage.org
data: 192.168.101.62
grandpalais/CNAME:
type: CNAME
record: grandpalais.internal.softwareheritage.org
data: grand-palais.internal.softwareheritage.org.
petit-palais/A:
record: petit-palais.internal.softwareheritage.org
data: 192.168.101.58
petitpalais/CNAME:
type: CNAME
record: petitpalais.internal.softwareheritage.org
data: petit-palais.internal.softwareheritage.org.
giverny/A:
type: A
record: giverny.internal.softwareheritage.org
data: 192.168.101.118
ddouard-desktop/A:
record: ddouard-desktop.internal.softwareheritage.org
data: 192.168.101.162
vlorentz-desktop/A:
record: vlorentz-desktop.internal.softwareheritage.org
data: 192.168.101.166
oturtle-desktop/A:
record: oturtle-desktop.internal.softwareheritage.org
data: 192.168.101.222
bind::resource_records::default_data:
type: A
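# Any record above without an explicit type therefore defaults to an A record;
# only the CNAME entries spell their type out. A minimal additional host would
# need just a record and a data field (name and address are hypothetical):
#   example-host/A:
#     record: example-host.internal.softwareheritage.org
#     data: 192.168.100.250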
bind::clients:
- 192.168.50.0/24
- 192.168.100.0/24
- 192.168.101.0/24
- 192.168.102.0/23
- 192.168.130.0/24
- 192.168.200.0/21
- 127.0.0.0/8
- '::1/128'
bind::autogenerate:
192.168.100.0/24: .internal.softwareheritage.org
192.168.200.0/21: .internal.softwareheritage.org
192.168.130.0/24: .internal.staging.swh.network
192.168.50.0/24: .internal.admin.swh.network
backups::legacy_storage: /srv/backups
backups::enable: true
backups::base: /
backups::exclude:
- dev
- proc
- run
- srv/backups
- srv/db-backups
- srv/elasticsearch
- srv/remote-backups
- srv/softwareheritage/objects
- srv/softwareheritage/postgres
- srv/softwareheritage/scratch
- srv/softwareheritage/scratch.2TB
- srv/storage
- sys
- tmp
- var/cache
- var/lib/mysql
- var/log/journal
- var/run
- var/tmp
phabricator::basepath: /srv/phabricator
phabricator::user: phabricator
phabricator::group: phabricator
phabricator::vcs_user: git
phabricator::notification::client_host: 127.0.0.1
phabricator::notification::client_port: 22280
phabricator::notification::listen: "%{hiera('phabricator::notification::client_host')}:%{hiera('phabricator::notification::client_port')}"
phabricator::mysql::database_prefix: phabricator
phabricator::mysql::username: phabricator
phabricator::mysql::readonly_usernames: []
# phabricator::mysql::readonly_password_seed in private data
phabricator::mysql::conf::max_allowed_packet: 33554432
phabricator::mysql::conf::sql_mode: STRICT_ALL_TABLES
phabricator::mysql::conf::ft_stopword_file: "%{hiera('phabricator::basepath')}/phabricator/resources/sql/stopwords.txt"
phabricator::mysql::conf::ft_min_word_len: 3
phabricator::mysql::conf::ft_boolean_syntax: "' |-><()~*:\"\"&^'"
phabricator::mysql::conf::innodb_buffer_pool_size: 4G
phabricator::mysql::conf::innodb_file_per_table: TRUE
phabricator::mysql::conf::innodb_flush_method: O_DIRECT
phabricator::mysql::conf::innodb_log_file_size: 1G
phabricator::mysql::conf::max_connections: 16384
phabricator::php::fpm_listen: 127.0.0.1:9001
phabricator::php::max_file_size: 128M
phabricator::php::opcache_validate_timestamps: 0
# Must have a matching certificate in letsencrypt::certificates
phabricator::vhost::name: forge.softwareheritage.org
phabricator::vhost::docroot: "%{hiera('phabricator::basepath')}/phabricator/webroot"
phabricator::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
phabricator::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
phabricator::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
phabricator::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
mediawiki::php::fpm_listen: 127.0.0.1:9002
mediawiki::vhosts:
# Must have matching certificates in letsencrypt::certificates
intranet.softwareheritage.org:
swh_logo: /images/9/99/Swh-intranet-logo.png
mysql:
username: mw_intranet
dbname: mediawiki_intranet
aliases: []
site_name: Software Heritage Intranet
wiki.softwareheritage.org:
swh_logo: /images/b/b2/Swh-logo.png
mysql:
username: mw_public
dbname: mediawiki_public
aliases: []
site_name: Software Heritage Wiki
mediawiki::vhost::docroot: /var/lib/mediawiki
mediawiki::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
mediawiki::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
mediawiki::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
mediawiki::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
annex::basepath: /srv/softwareheritage/annex
# Must have matching certificate in letsencrypt::certificates
annex::vhost::name: annex.softwareheritage.org
annex::vhost::docroot: "%{hiera('annex::basepath')}/webroot"
annex::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
annex::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
annex::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
annex::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
docs::basepath: /srv/softwareheritage/docs
# Must have matching certificate in letsencrypt::certificates
docs::vhost::name: docs.softwareheritage.org
docs::vhost::docroot: "%{hiera('docs::basepath')}/webroot"
docs::vhost::docroot_owner: "jenkins-push-docs"
docs::vhost::docroot_group: "www-data"
docs::vhost::docroot_mode: "2755"
docs::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
docs::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
docs::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
docs::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
# Must have matching certificate in letsencrypt::certificates
bitbucket_archive::vhost::name: bitbucket-archive.softwareheritage.org
bitbucket_archive::vhost::docroot: /srv/softwareheritage/bitbucket-archive/webroot
bitbucket_archive::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
bitbucket_archive::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
bitbucket_archive::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
bitbucket_archive::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
ssh::port: 22
ssh::permitrootlogin: without-password
swh::base_directory: /srv/softwareheritage
swh::conf_directory: /etc/softwareheritage
swh::log_directory: /var/log/softwareheritage
swh::global_conf::file: "%{hiera('swh::conf_directory')}/global.ini"
swh::apt_config::swh_repository::hostname: debian.softwareheritage.org
swh::apt_config::swh_repository: "https://%{hiera('swh::apt_config::swh_repository::hostname')}/"
swh::apt_config::enable_non_free: false
swh::apt_config::backported_packages:
stretch:
# For swh.scheduler
- python3-msgpack
# T1609
- python3-urllib3
- python3-requests
- python3-chardet
- python3-idna
buster:
# More recent systemd for new features like OOMPolicy
- libnss-myhostname
- libnss-mymachines
- libnss-resolve
- libnss-systemd
- libpam-systemd
- libsystemd-dev
- libsystemd0
- libudev-dev
- libudev1
- libudev1-udeb
- libzstd1
- systemd
- systemd-container
- systemd-coredump
- systemd-journal-remote
- systemd-sysv
- systemd-tests
- udev
- udev-udeb
# python3-msgpack 1.0 breaks the older versions of borgbackup
- borgbackup
# Needed for some swh packages
- python3-typing-extensions
debian_repository::basepath: "%{hiera('swh::base_directory')}/repository"
debian_repository::owner: swhdebianrepo
debian_repository::owner::homedir: /home/swhdebianrepo
debian_repository::group: swhdev
debian_repository::mode: "02775"
debian_repository::ssh_authorized_keys:
nicolasd@darboux:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABAQDZ1TCpfzrvxLhEMhxjbxqPDCwY0nazIr1cyIbhGD2bUdAbZqVMdNtr7MeDnlLIKrIPJWuvltauvLNkYU0iLc1jMntdBCBM3hgXjmTyDtc8XvXseeBp5tDqccYNR/cnDUuweNcL5tfeu5kzaAg3DFi5Dsncs5hQK5KQ8CPKWcacPjEk4ir9gdFrtKG1rZmg/wi7YbfxrJYWzb171hdV13gSgyXdsG5UAFsNyxsKSztulcLKxvbmDgYbzytr38FK2udRk7WuqPbtEAW1zV4yrBXBSB/uw8EAMi+wwvLTwyUcEl4u0CTlhREljUx8LhYrsQUCrBcmoPAmlnLCD5Q9XrGH
jenkins@thyssen:
type: ssh-rsa
key: AAAAB3NzaC1yc2EAAAADAQABAAABAQCrfYnl8v4QK1ClkPMHO4WiPqgLVoOGpOPFUvg3WehMo8xMQ9e/EeZddQn96mhHkbbC5HCWEVK1VwafpIeadaMHnypdGhpapncYPpoKItxmf1IwVtlt/h8OYai5pTMCgkuOHjhnQdO20Amr9WMkoRZ/K7v/GijIZ6svvgWiYKfDnu0s1ziFYIT5rEA5hL9SqNJTlKdy2H68/7mmTii9NpBsGWQYDOjcrwELNOI5EUgQSOzmeKxecPkABfh/dezp6jmrv/2x7bm7LT46d+rnVDqVRiUrLVnLhrZCmZDxXfbEmftTdAoK8U/wjLreanRxKOc7arYRyKu0RbAaejPejzgR
debian_repository::gpg_keys:
# olasd
- 791F12396630DD71FD364375B8E5087766475AAF
- 6F339C5E1725D5E379100F096F31F7545A885252
# zack
- 4900707DDC5C07F2DECB02839C31503C6D866396
# ardumont
- BF00203D741AC9D546A8BE0752E2E9840D10C3B8
# anlambert
- 91FAF3F5CDE011E4FDF4CBF2D026E5C2F802586D
# seirl
- 225CD9E3FA9374BDF6E057042F8984858B1A9945
# vlorentz
- 379043E3DF96D3237E6782AC0E082B40E4376B1E
# ddouard
- 7DC7325EF1A6226AB6C3D7E32388A3BF6F0A6938
# jenkins-debian1
- 1F4BDC445E30C7066324D7B3D7D3329147AE3148
# vsellier
- 89226003C15CBC8C65C189C33F13C434EADAD17D
# Must have matching certificate in letsencrypt::certificates
debian_repository::vhost::name: "%{hiera('swh::apt_config::swh_repository::hostname')}"
debian_repository::vhost::aliases:
- debian.internal.softwareheritage.org
debian_repository::vhost::docroot: "%{hiera('debian_repository::basepath')}"
debian_repository::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
debian_repository::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
debian_repository::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
debian_repository::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
swh::apt_config::debian_mirror::hostname: deb.debian.org
swh::apt_config::debian_mirror: "http://%{hiera('swh::apt_config::debian_mirror::hostname')}/debian/"
swh::apt_config::debian_security_mirror::hostname: "%{hiera('swh::apt_config::debian_mirror::hostname')}"
swh::apt_config::debian_security_mirror: "http://%{hiera('swh::apt_config::debian_mirror::hostname')}/debian-security/"
swh::apt_config::azure_repository::hostname: debian-archive.trafficmanager.net
swh::apt_config::azure_repository: "http://%{hiera('swh::apt_config::azure_repository::hostname')}/debian-azure/"
swh::apt_config::unattended_upgrades: true
swh::apt_config::unattended_upgrades::origins:
- "o=Debian,codename=${distro_codename}" # main Debian archive
- "o=Debian,codename=${distro_codename}-updates" # stable-updates (ex-volatile)
- "o=Debian,codename=${distro_codename},l=Debian-Security" # security updates (buster and below)
- "o=Debian,codename=${distro_codename}-security,l=Debian-Security" # security updates (bullseye and later)
- "o=debian icinga-stable,codename=icinga-${distro_codename}" # Icinga2 repository
- "o=Debian Azure,codename=${distro_codename}" # Debian Azure
- "o=Proxmox,codename=${distro_codename}" # Proxmox repository
- "o=packages.sury.org" # PHP backports (tate)
#####################################################################################################
# Remote service configurations
# Default ports
swh::remote_service::storage::port: 5002
swh::remote_service::objstorage::port: 5003
swh::remote_service::webapp::port: 5004
swh::remote_service::vault::port: 5005
swh::remote_service::deposit::port: 5006
swh::remote_service::indexer::port: 5007
swh::remote_service::scheduler::port: 5008
swh::remote_service::graph::port: 5009
swh::remote_service::search::port: 5010
swh::remote_service::counters::port: 5011
# Default backend services. Override in specific sites if needed. Configurations
# are split between read-only (the default) and writable storages. In most cases
# overrides should only happen for read-only services.
swh::remote_service::objstorage::config: "%{alias('swh::remote_service::objstorage::config::azure_readonly_with_fallback')}"
swh::remote_service::objstorage::config::writable: "%{alias('swh::remote_service::objstorage::config::saam')}"
swh::remote_service::objstorage::config_as_dict:
banco: "%{alias('swh::remote_service::objstorage::config::banco')}"
saam: "%{alias('swh::remote_service::objstorage::config::saam')}"
azure: "%{alias('swh::remote_service::objstorage::config::azure')}"
swh::remote_service::storage::config: "%{alias('swh::remote_service::storage::config::saam')}"
swh::remote_service::storage::config::writable: &swh_remote_service_storage_config_writable
"%{alias('swh::remote_service::storage::config::saam')}"
swh::remote_service::indexer::config: "%{alias('swh::remote_service::indexer::config::saam')}"
swh::remote_service::indexer::config::writable: "%{alias('swh::remote_service::indexer::config::saam')}"
swh::remote_service::scheduler::config: "%{alias('swh::remote_service::scheduler::config::saatchi')}"
swh::remote_service::scheduler::config::writable: "%{alias('swh::remote_service::scheduler::config::saatchi')}"
swh::remote_service::vault::config: "%{alias('swh::remote_service::vault::config::azure')}"
swh::remote_service::vault::config::writable: "%{alias('swh::remote_service::vault::config::azure')}"
# Pipeline storage with retry, filter, buffer and finally writable storage
swh::deploy::worker::storage::pipeline:
cls: pipeline
steps:
- cls: buffer
min_batch_size:
content: 1000
content_bytes: 52428800 # 50 MB
directory: 1000
directory_entries: 12000
revision: 1000
revision_parents: 2000
revision_bytes: 52428800
release: 1000
release_bytes: 52428800
extid: 1000
- cls: filter
- cls: retry
- "%{alias('swh::remote_service::storage::config::writable')}"
# Objstorage backend configurations
swh::remote_service::objstorage::config::azure:
cls: azure-prefixed
accounts:
"0":
account_name: 0euwestswh
api_secret_key: "%{hiera('swh::azure::credentials::0euwestswh')}"
container_name: contents
"1":
account_name: 1euwestswh
api_secret_key: "%{hiera('swh::azure::credentials::1euwestswh')}"
container_name: contents
"2":
account_name: 2euwestswh
api_secret_key: "%{hiera('swh::azure::credentials::2euwestswh')}"
container_name: contents
"3":
account_name: 3euwestswh
api_secret_key: "%{hiera('swh::azure::credentials::3euwestswh')}"
container_name: contents
"4":
account_name: 4euwestswh
api_secret_key: "%{hiera('swh::azure::credentials::4euwestswh')}"
container_name: contents
"5":
account_name: 5euwestswh
api_secret_key: "%{hiera('swh::azure::credentials::5euwestswh')}"
container_name: contents
"6":
account_name: 6euwestswh
api_secret_key: "%{hiera('swh::azure::credentials::6euwestswh')}"
container_name: contents
"7":
account_name: 7euwestswh
api_secret_key: "%{hiera('swh::azure::credentials::7euwestswh')}"
container_name: contents
"8":
account_name: 8euwestswh
api_secret_key: "%{hiera('swh::azure::credentials::8euwestswh')}"
container_name: contents
"9":
account_name: 9euwestswh
api_secret_key: "%{hiera('swh::azure::credentials::9euwestswh')}"
container_name: contents
"a":
account_name: aeuwestswh
api_secret_key: "%{hiera('swh::azure::credentials::aeuwestswh')}"
container_name: contents
"b":
account_name: beuwestswh
api_secret_key: "%{hiera('swh::azure::credentials::beuwestswh')}"
container_name: contents
"c":
account_name: ceuwestswh
api_secret_key: "%{hiera('swh::azure::credentials::ceuwestswh')}"
container_name: contents
"d":
account_name: deuwestswh
api_secret_key: "%{hiera('swh::azure::credentials::deuwestswh')}"
container_name: contents
"e":
account_name: eeuwestswh
api_secret_key: "%{hiera('swh::azure::credentials::eeuwestswh')}"
container_name: contents
"f":
account_name: feuwestswh
api_secret_key: "%{hiera('swh::azure::credentials::feuwestswh')}"
container_name: contents
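# The azure-prefixed backend shards contents across these sixteen storage
# accounts by the leading hex digit of the object identifier, so each account
# holds roughly one sixteenth of the objects; the swh::azure::credentials::*
# secrets are presumably resolved from the private hiera data.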
swh::remote_service::objstorage::config::azure::readonly:
cls: filtered
storage_conf: "%{alias('swh::remote_service::objstorage::config::azure')}"
filters_conf:
- type: readonly
swh::remote_service::objstorage::config::saam:
cls: remote
url: "http://saam.internal.softwareheritage.org:%{hiera('swh::remote_service::objstorage::port')}/"
swh::remote_service::objstorage::config::saam::readonly:
cls: filtered
storage_conf: "%{alias('swh::remote_service::objstorage::config::saam')}"
filters_conf:
- type: readonly
swh::remote_service::objstorage::config::banco:
cls: remote
url: "http://banco.internal.softwareheritage.org:%{hiera('swh::remote_service::objstorage::port')}/"
swh::remote_service::objstorage::config::banco::readonly:
cls: filtered
storage_conf: "%{alias('swh::remote_service::objstorage::config::banco')}"
filters_conf:
- type: readonly
swh::remote_service::objstorage::config::azure_readonly_with_fallback: &swh_azure_readonly_with_fallback
cls: multiplexer
objstorages:
- "%{alias('swh::remote_service::objstorage::config::azure::readonly')}"
- "%{alias('swh::remote_service::objstorage::config::banco::readonly')}"
- "%{alias('swh::remote_service::objstorage::config::saam::readonly')}"
swh::remote_service::objstorage::config::localhost:
cls: remote
url: "http://127.0.0.1:%{hiera('swh::remote_service::objstorage::port')}/"
# Storage backend configurations
swh::remote_service::storage::config::saam:
cls: remote
url: "http://saam.internal.softwareheritage.org:%{hiera('swh::remote_service::storage::port')}/"
swh::remote_service::storage::config::azure:
cls: remote
url: "http://storage01.euwest.azure.internal.softwareheritage.org:%{hiera('swh::remote_service::storage::port')}/"
swh::remote_service::storage::config::local_internal_network:
cls: remote
url: "http://%{lookup('swh::deploy::storage::backend::listen::host')}:%{lookup('swh::remote_service::storage::port')}/"
swh::remote_service::search::config::empty: {}
swh::remote_service::search::config::local_internal_network:
cls: remote
url: "http://%{lookup('swh::deploy::search::backend::listen::host')}:%{hiera('swh::remote_service::search::port')}/"
swh::remote_service::search::config::search1:
cls: remote
url: "http://search1.internal.softwareheritage.org:%{hiera('swh::remote_service::search::port')}/"
swh::remote_service::search::config: "%{alias('swh::remote_service::search::config::local_internal_network')}"
# Indexer backend configurations
swh::remote_service::indexer::config::saam:
cls: remote
url: "http://saam.internal.softwareheritage.org:%{hiera('swh::remote_service::indexer::port')}/"
swh::remote_service::indexer::config::azure:
cls: remote
url: "http://storage01.euwest.azure.internal.softwareheritage.org:%{hiera('swh::remote_service::indexer::port')}/"
# Scheduler backend configurations
swh::remote_service::scheduler::config::saatchi:
cls: remote
url: "http://saatchi.internal.softwareheritage.org:%{hiera('swh::remote_service::scheduler::port')}/"
# Vault backend configurations
swh::remote_service::vault::config::azure:
cls: remote
url: "http://vangogh.euwest.azure.internal.softwareheritage.org:%{hiera('swh::remote_service::vault::port')}/"
# Counters backend configurations
swh::remote_service::counters::url: "http://counters1.internal.softwareheritage.org:%{hiera('swh::remote_service::counters::port')}/"
swh::remote_service::counters::config:
cls: remote
url: "%{alias('swh::remote_service::counters::url')}"
swh::remote_service::graph::config:
url: "http://graph.internal.softwareheritage.org:%{hiera('swh::remote_service::graph::port')}/"
# End remote service configurations
#####################################################################################################
swh::deploy::db::pgbouncer::port: 5432
swh::deploy::db::main::port: 5433
swh::deploy::db::secondary::port: 5434
swh::deploy::db::indexer::port: 5435
swh::deploy::db::pgbouncer::user::login: postgres
swh::deploy::db::pgbouncer::user::password: "%{alias('swh::deploy::db::postgres::password')}"
pgbouncer::common::listen_addresses:
- 127.0.0.1
- 127.0.1.1
- "%{alias('pgbouncer::listen_addr')}"
pgbouncer::config_params:
logfile: /var/log/postgresql/pgbouncer.log
pidfile: /var/run/postgresql/pgbouncer.pid
unix_socket_dir: /var/run/postgresql
client_tls_sslmode: allow
client_tls_ca_file: /etc/ssl/certs/ssl-cert-snakeoil.pem
client_tls_key_file: /etc/ssl/private/ssl-cert-snakeoil.key
client_tls_cert_file: /etc/ssl/certs/ssl-cert-snakeoil.pem
server_tls_sslmode: allow
listen_port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
listen_addr: "%{alias('pgbouncer::common::listen_addresses')}"
auth_type: "hba"
auth_file: /etc/pgbouncer/userlist.txt
auth_hba_file: "%{hiera('pgbouncer::auth_hba_file')}"
admin_users:
- "%{hiera('swh::deploy::db::pgbouncer::user::login')}"
- olasd
pool_mode: session
ignore_startup_parameters: extra_float_digits
server_reset_query: DISCARD ALL
max_client_conn: 2000
default_pool_size: 2000
max_db_connections: 2000
max_user_connections: 2000
log_connections: 0
log_disconnections: 0
pgbouncer::user: postgres
pgbouncer::group: postgres
# swh::deploy::db::pgbouncer::user::password in private data
pgbouncer::userlist:
- user: "%{hiera('swh::deploy::db::pgbouncer::user::login')}"
password: "%{hiera('swh::deploy::db::pgbouncer::user::password')}"
pgbouncer::databases: []
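# The userlist above is presumably rendered by the pgbouncer puppet module
# into the auth_file declared in pgbouncer::config_params
# (/etc/pgbouncer/userlist.txt), which stock pgbouncer expects as one quoted
# "user" "password" pair per line. Illustration only (commented out,
# placeholder password):
#   "postgres" "<password-from-private-data>"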
swh::deploy::directory: "%{hiera('swh::conf_directory')}/deploy"
swh::deploy::group: swhdeploy
swh::deploy::public_key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWrJX/uUss/EYZaTp2EIsZgg3ZSH8JcNZV5gBdNZ7EHcQcqxYUCqmwv9Ss3xT8n9kIrH6iz/vquqf84XR+keoZK3bsp50tMOY8LJWpcl/JK2XD6ovoJrHPu+iAroLkE59RdTa1Vz+jF67Q2UuG9f0nKwL4rnkeWTyuK/zAbyHyYKFQntkkwMr5/YTU8sjl/4aNF/2Ww8hitdi2GORlCjav2bB0wyPBA2e8sMt8Hp9O4TIWg/RD6vPX+ZvuFaB/Lw/Hv21622QGTHoZiO92/8/W9/t24il6SU4z96ZGfXqdUZkpPYKBGwyIkZkS4dN6jb4CcRlyXTObphyu3dAlABRt swhworker@worker01'
swh::deploy::storage::sentry_swh_package: swh.storage
swh::deploy::storage::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::storage::sentry_dsn: "https://%{lookup('swh::deploy::storage::sentry_token')}@sentry.softwareheritage.org/3"
swh::deploy::storage::conf_directory: "%{hiera('swh::conf_directory')}/storage"
swh::deploy::storage::conf_file: "%{hiera('swh::deploy::storage::conf_directory')}/storage.yml"
swh::deploy::storage::user: swhstorage
swh::deploy::storage::group: swhstorage
swh::deploy::storage::db::host: db.internal.softwareheritage.org
swh::deploy::storage::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}"
swh::deploy::storage::db::user: swhstorage
swh::deploy::storage::db::dbname: softwareheritage
swh::deploy::storage::directory: "%{hiera('swh::base_directory')}/objects"
swh::deploy::storage::backend::listen::host: 127.0.0.1
swh::deploy::storage::backend::listen::port: "%{alias('swh::remote_service::storage::port')}"
swh::deploy::storage::backend::workers: 4
swh::deploy::storage::backend::reload_mercy: 3600
swh::deploy::storage::backend::http_keepalive: 5
swh::deploy::storage::backend::http_timeout: 3600
swh::deploy::storage::backend::max_requests: 10000
swh::deploy::storage::backend::max_requests_jitter: 1000
swh::deploy::storage::backend::server_names:
- "%{::swh_hostname.internal_fqdn}"
- "%{::hostname}"
- 127.0.0.1
- localhost
- "::1"
# This can be overridden per storage node (a commented-out example follows the config block below)
swh::deploy::storage::config::local:
cls: postgresql
db: "host=%{hiera('swh::deploy::storage::db::host')} port=%{hiera('swh::deploy::storage::db::port')} user=%{hiera('swh::deploy::storage::db::user')} dbname=%{hiera('swh::deploy::storage::db::dbname')} password=%{hiera('swh::deploy::storage::db::password')}"
objstorage: "%{alias('swh::remote_service::objstorage::config')}"
swh::deploy::storage::config:
storage: "%{alias('swh::deploy::storage::config::local')}"
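# A hypothetical per-node override of the local storage backend; it would live
# in that node's hiera file, not here. Illustration only (commented out,
# placeholder host and DSN):
#   swh::deploy::storage::config::local:
#     cls: postgresql
#     db: "host=storage-db.example.internal port=5432 user=swhstorage dbname=softwareheritage password=..."
#     objstorage: "%{alias('swh::remote_service::objstorage::config')}"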
swh::deploy::journal::writer::config:
cls: kafka
brokers: "%{alias('swh::deploy::journal::brokers')}"
prefix: "%{alias('swh::deploy::journal::prefix')}"
client_id: "swh.storage.journal_writer.%{::swh_hostname.short}"
anonymize: true
producer_config:
message.max.bytes: 1000000000
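# Notes on the journal writer config above: client_id interpolates the node's
# short hostname fact (e.g. "swh.storage.journal_writer.somehost" on a
# hypothetical host "somehost"); the producer_config entries are presumably
# passed straight through to the Kafka producer (message.max.bytes being the
# librdkafka cap on message size); anonymize: true asks the writer to
# anonymize person identities in the objects it writes.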
swh::deploy::journal::backfill::config_file: "%{hiera('swh::deploy::journal::conf_directory')}/backfill.yml"
swh::deploy::journal::backfill::user: swhstorage
swh::deploy::journal::backfill::group: swhstorage
swh::deploy::storage::db::config::read-only: "host=%{hiera('swh::deploy::storage::db::host')} port=%{hiera('swh::deploy::storage::db::port')} user=guest dbname=%{hiera('swh::deploy::storage::db::dbname')} password=guest"
swh::deploy::storage::db::secondary::config::read-only: "host=somerset.internal.softwareheritage.org port=%{hiera('swh::deploy::storage::db::port')} user=guest dbname=%{hiera('swh::deploy::storage::db::dbname')} password=guest"
swh::deploy::storage::config::local-read-only:
cls: postgresql
db: "%{alias('swh::deploy::storage::db::config::read-only')}"
# objstorage is required but it's not used in that context
objstorage:
cls: memory
swh::deploy::journal::backfill::config:
storage: "%{alias('swh::deploy::storage::config::local-read-only')}"
journal_writer: "%{alias('swh::deploy::journal::writer::config')}"
swh::deploy::journal::backfill::config_logging_file: "%{hiera('swh::deploy::journal::conf_directory')}/backfill_logger.yml"
swh::deploy::journal::backfill::config_logging:
version: 1
handlers:
console:
class: logging.StreamHandler
formatter: detail
level: INFO
stream: ext://sys.stdout
formatters:
detail:
format: '%(asctime)s %(levelname)-8s %(name)-15s %(message)s'
datefmt: '%Y-%m-%dT%H:%M:%S'
loggers:
swh: {}
root:
level: INFO
handlers:
- console
swh::deploy::indexer::storage::sentry_swh_package: swh.indexer
swh::deploy::indexer::storage::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::indexer::storage::sentry_dsn: "%{alias('swh::deploy::indexer::sentry_dsn')}"
swh::deploy::indexer::storage::conf_file: "%{hiera('swh::deploy::storage::conf_directory')}/indexer.yml"
swh::deploy::indexer::storage::user: swhstorage
swh::deploy::indexer::storage::group: swhstorage
swh::deploy::indexer::storage::db::host: belvedere.internal.softwareheritage.org
swh::deploy::indexer::storage::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}"
swh::deploy::indexer::storage::db::user: swhstorage
swh::deploy::indexer::storage::db::dbname: softwareheritage-indexer
swh::deploy::indexer::storage::backend::listen::host: 127.0.0.1
swh::deploy::indexer::storage::backend::listen::port: "%{alias('swh::remote_service::indexer::port')}"
swh::deploy::indexer::storage::backend::workers: 4
swh::deploy::indexer::storage::backend::reload_mercy: 3600
swh::deploy::indexer::storage::backend::http_keepalive: 5
swh::deploy::indexer::storage::backend::http_timeout: 3600
swh::deploy::indexer::storage::backend::max_requests: 10000
swh::deploy::indexer::storage::backend::max_requests_jitter: 1000
swh::deploy::indexer::storage::backend::server_names:
- "%{::swh_hostname.internal_fqdn}"
- "%{::hostname}"
- 127.0.0.1
- localhost
- "::1"
swh::deploy::provenance::db::dbname: swh-provenance
swh::deploy::provenance::db::user: swh-provenance
swh::deploy::provenance::db::host: met.internal.softwareheritage.org
swh::deploy::indexer::storage::config:
indexer_storage:
cls: postgresql
db: "host=%{hiera('swh::deploy::indexer::storage::db::host')} port=%{hiera('swh::deploy::indexer::storage::db::port')} user=%{hiera('swh::deploy::indexer::storage::db::user')} dbname=%{hiera('swh::deploy::indexer::storage::db::dbname')} password=%{hiera('swh::deploy::indexer::storage::db::password')}"
journal_writer: "%{alias('swh::deploy::indexer::journal::writer::config')}"
swh::deploy::indexer::journal::prefix: 'swh.journal.indexed'
swh::deploy::indexer::brokers: "%{alias('swh::deploy::journal::brokers')}"
swh::deploy::indexer::journal::writer::config:
cls: kafka
brokers: "%{alias('swh::deploy::indexer::brokers')}"
prefix: "%{alias('swh::deploy::indexer::journal::prefix')}"
client_id: "swh.idx_storage.journal_writer.%{::swh_hostname.short}"
producer_config:
message.max.bytes: 1000000000
swh::deploy::vault::cache: "%{hiera('swh::base_directory')}/vault_cache"
# The default cache is a pathslicing objstorage (the slicing notation is illustrated below)
swh::deploy::vault::config::cache:
cls: pathslicing
root: "%{hiera('swh::deploy::vault::cache')}"
slicing: "0:1/1:5"
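# Sketch, not authoritative: the slicing string is read by the pathslicing
# backend as a list of start:end slices of the hex object id, each yielding
# one directory level under root, with the full id as the file name.
# Illustration only, for a hypothetical object id
# 34973274ccef6ab4dfaaf86599792fa9c3fe4689 and the "0:1/1:5" slicing above:
#   <root>/3/4973/34973274ccef6ab4dfaaf86599792fa9c3fe4689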
swh::deploy::vault::sentry_swh_package: swh.vault
swh::deploy::vault::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::vault::sentry_dsn: "https://%{lookup('swh::deploy::vault::sentry_token')}@sentry.softwareheritage.org/11"
swh::deploy::vault::conf_directory: "%{hiera('swh::conf_directory')}/vault"
swh::deploy::vault::conf_file: "%{hiera('swh::deploy::vault::conf_directory')}/server.yml"
swh::deploy::vault::user: swhvault
swh::deploy::vault::group: swhvault
swh::deploy::vault::db::host: db.internal.softwareheritage.org
swh::deploy::vault::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}"
swh::deploy::vault::db::user: swh-vault
swh::deploy::vault::db::dbname: swh-vault
swh::deploy::vault::backend::listen::host: 127.0.0.1
swh::deploy::vault::backend::listen::port: "%{alias('swh::remote_service::vault::port')}"
swh::deploy::vault::backend::workers: 4
swh::deploy::vault::backend::reload_mercy: 3600
swh::deploy::vault::backend::http_keepalive: 5
swh::deploy::vault::backend::http_timeout: 3600
swh::deploy::vault::backend::max_requests: 10000
swh::deploy::vault::backend::max_requests_jitter: 1000
swh::deploy::vault::backend::server_names:
- "%{::swh_hostname.internal_fqdn}"
- "%{::hostname}"
- 127.0.0.1
- localhost
- "::1"
swh::deploy::vault::config:
objstorage: "%{alias('swh::remote_service::objstorage::config')}"
storage:
cls: retry
storage: "%{alias('swh::remote_service::storage::config')}"
scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}"
cache: "%{alias('swh::deploy::vault::config::cache')}"
smtp:
host: localhost
port: 25
vault:
cls: postgresql
db: "host=%{hiera('swh::deploy::vault::db::host')} port=%{hiera('swh::deploy::vault::db::port')} user=%{hiera('swh::deploy::vault::db::user')} dbname=%{hiera('swh::deploy::vault::db::dbname')} password=%{hiera('swh::deploy::vault::db::password')}"
swh::deploy::graph::user: swhworker
swh::deploy::graph::group: swhworker
swh::deploy::graph::conf_directory: "%{hiera('swh::conf_directory')}/graph"
swh::deploy::graph::http::listen::host: 0.0.0.0
swh::deploy::graph::http::listen::port: "%{alias('swh::remote_service::graph::port')}"
swh::deploy::graph::grpc::listen::host: 0.0.0.0
swh::deploy::graph::grpc::listen::port: 50091 # hardcoded in swh.graph
swh::deploy::graph::grpc::max_heap: 500g
swh::deploy::graph::sentry_swh_package: swh.graph
swh::deploy::graph::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::graph::sentry_dsn: "https://%{lookup('swh::deploy::graph::sentry_token')}@sentry.softwareheritage.org/20"
swh::deploy::journal::conf_directory: "%{hiera('swh::conf_directory')}/journal"
swh::deploy::journal::brokers:
- kafka1.internal.softwareheritage.org
- kafka2.internal.softwareheritage.org
- kafka3.internal.softwareheritage.org
- kafka4.internal.softwareheritage.org
swh::deploy::journal::prefix: swh.journal.objects
swh::deploy::scrubber::db::user: swh-scrubber
swh::deploy::scrubber::db::dbname: swh-scrubber
swh::deploy::scrubber::db::host: db.internal.softwareheritage.org
swh::deploy::scrubber::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}"
swh::deploy::scrubber::sentry_swh_package: swh.scrubber
swh::deploy::scrubber::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::scrubber::sentry_dsn: "https://%{lookup('swh::deploy::scrubber::sentry_token')}@sentry.softwareheritage.org/23"
swh::deploy::scrubber::checker::postgres::conf_directory: "%{hiera('swh::conf_directory')}/scrubber"
swh::deploy::scrubber::checker::postgres::user: swhworker
swh::deploy::scrubber::checker::postgres::group: swhdev
swh::deploy::scrubber::checker::postgres::object_types:
- directory
- revision
- release
- snapshot
swh::deploy::scrubber::checker::postgres::ranges:
- 0000000000000000000000000000000000000000:3fffffffffffffffffffffffffffffffffffffff
- 4000000000000000000000000000000000000000:7fffffffffffffffffffffffffffffffffffffff
- 8000000000000000000000000000000000000000:afffffffffffffffffffffffffffffffffffffff
- b000000000000000000000000000000000000000:ffffffffffffffffffffffffffffffffffffffff
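# These four start:end ranges jointly cover the whole 40-hex-digit object id
# space (0000... through ffff...), presumably so that separate checker
# processes can each scrub a disjoint slice of the hash space.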
swh::deploy::scrubber::checker::postgres::config_per_db:
primary:
config:
scrubber_db:
cls: postgresql
db: "%{alias('swh::deploy::scrubber::db::config')}"
storage:
cls: postgresql
db: "%{alias('swh::deploy::storage::db::config::read-only')}"
objstorage:
cls: noop
# password entry in private-data
swh::deploy::scrubber::db::config: "host=%{hiera('swh::deploy::scrubber::db::host')} port=%{hiera('swh::deploy::db::pgbouncer::port')} dbname=%{hiera('swh::deploy::scrubber::db::dbname')} user=%{hiera('swh::deploy::scrubber::db::user')} password=%{hiera('swh::deploy::scrubber::db::password')}"
swh::deploy::scrubber::checker::postgres::config:
scrubber_db:
cls: postgresql
db: "%{alias('swh::deploy::scrubber::db::config')}"
storage:
cls: postgresql
db: "%{alias('swh::deploy::storage::db::config::read-only')}"
objstorage:
cls: noop
swh::deploy::journal_simple_checker_producer::conf_file: "%{hiera('swh::deploy::journal::conf_directory')}/checker.yml"
swh::deploy::journal_simple_checker_producer::user: swhstorage
swh::deploy::journal_simple_checker_producer::group: swhstorage
swh::deploy::journal_simple_checker_producer::config:
brokers: "%{alias('swh::deploy::journal::brokers')}"
temporary_prefix: swh.tmp_journal.new
storage_dbconn: "host=%{hiera('swh::deploy::storage::db::host')} port=%{hiera('swh::deploy::storage::db::port')} user=%{hiera('swh::deploy::storage::db::user')} dbname=%{hiera('swh::deploy::storage::db::dbname')} password=%{hiera('swh::deploy::storage::db::password')}"
object_types:
- content
- directory
- revision
- release
- origin
- origin_visit
swh::deploy::objstorage::sentry_swh_package: swh.objstorage
swh::deploy::objstorage::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::objstorage::sentry_dsn: "https://%{lookup('swh::deploy::objstorage::sentry_token')}@sentry.softwareheritage.org/4"
swh::deploy::objstorage::conf_directory: "%{hiera('swh::conf_directory')}/objstorage"
swh::deploy::objstorage::conf_file: "%{hiera('swh::deploy::objstorage::conf_directory')}/server.yml"
swh::deploy::objstorage::user: "%{hiera('swh::deploy::storage::user')}"
swh::deploy::objstorage::group: "%{hiera('swh::deploy::storage::group')}"
swh::deploy::objstorage::directory: "%{hiera('swh::deploy::storage::directory')}"
swh::deploy::objstorage::slicing: 0:2/2:4/4:6
swh::deploy::objstorage::config:
objstorage:
cls: pathslicing
root: "%{hiera('swh::deploy::objstorage::directory')}"
slicing: "%{hiera('swh::deploy::objstorage::slicing')}"
client_max_size: 1073741824 # 1 GiB
swh::deploy::objstorage::backend::listen::host: 127.0.0.1
swh::deploy::objstorage::backend::listen::port: "%{alias('swh::remote_service::objstorage::port')}"
swh::deploy::objstorage::backend::workers: 4
swh::deploy::objstorage::backend::reload_mercy: 3600
swh::deploy::objstorage::backend::http_workers: 1
swh::deploy::objstorage::backend::http_keepalive: 5
swh::deploy::objstorage::backend::http_timeout: 3600
swh::deploy::objstorage::backend::max_requests: 0
swh::deploy::objstorage::backend::max_requests_jitter: 0
swh::deploy::objstorage::backend::server_names:
- "%{::swh_hostname.internal_fqdn}"
- "%{::hostname}"
- 127.0.0.1
- localhost
- "::1"
# The read-only objstorage uses basic auth: it returns a 401 Restricted status code
# without any content, hence the empty icinga check string below
swh::deploy::objstorage::icinga_check_string: ''
swh::deploy::objstorage::reverse_proxy::backend_http_port: "%{lookup('swh::remote_service::objstorage::port')}"
swh::deploy::objstorage::reverse_proxy::basic_auth: true
swh::deploy::objstorage::reverse_proxy::basic_auth::users:
- swh-prod
- enea-prod
swh::deploy::deposit::reverse_proxy::backend_http_port: "%{alias('varnish::backend_http_port')}"
# aliases are pulled from letsencrypt::certificates[$swh::deploy::deposit::vhost::letsencrypt_cert]
swh::deploy::deposit::vhost::letsencrypt_cert: deposit_production
swh::deploy::deposit::url: https://deposit.softwareheritage.org
swh::deploy::deposit::internal_url: "%{hiera('swh::deploy::deposit::url')}"
swh::deploy::deposit::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
swh::deploy::deposit::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
swh::deploy::deposit::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
swh::deploy::deposit::vhost::access_log_format: combined_with_duration
swh::deploy::deposit::locked_endpoints:
- /1/private/[^/]+/[^/]+/[^/]+
- /1/private/deposits/
# e2e vault
swh::deploy::vault::e2e::webapp: "%{alias('swh::deploy::webapp::url')}"
# e2e save code now
swh::deploy::savecodenow::e2e::activate: true # to be deactivated on secondary webapps
swh::deploy::savecodenow::e2e::origins:
- name: git
origin:
- https://github.com/rdicosmo/parmap
- https://github.com/SoftwareHeritage/swh-core
- https://github.com/SoftwareHeritage/swh-lister
- https://github.com/SoftwareHeritage/swh-loader-bzr
- https://github.com/SoftwareHeritage/swh-loader-core
- https://github.com/SoftwareHeritage/swh-loader-git
- https://github.com/SoftwareHeritage/swh-loader-mercurial
- https://github.com/SoftwareHeritage/swh-loader-svn
- https://github.com/SoftwareHeritage/swh-model
- https://github.com/SoftwareHeritage/swh-web
type: git
- name: subversion
origin:
- https://subversion.renater.fr/anonscm/svn/panda
- https://svn.code.sf.net/p/freeorion/code
- https://svn.code.sf.net/p/swig/code
- https://svn.code.sf.net/p/zookeeper/code
- https://svn.code.sf.net/p/esitools/code
- https://svn.code.sf.net/p/sauerbraten/code/ # existing origin has a trailing /
- https://svn.code.sf.net/p/civ4mods/code
- https://svn.code.sf.net/p/ldap-sdk/code
type: svn
- name: mercurial
origin:
- https://foss.heptapod.net/mercurial/hgview
- https://foss.heptapod.net/mercurial/tortoisehg
- https://foss.heptapod.net/mercurial/evolve
- https://foss.heptapod.net/mercurial/hg-git
- https://foss.heptapod.net/mercurial/hg-docgraph
- https://foss.heptapod.net/mercurial/mercurial-devel
- https://foss.heptapod.net/mercurial/pytest
- https://foss.heptapod.net/mercurial/scmperf
type: hg
swh::deploy::savecodenow::e2e::webapp: "%{alias('swh::deploy::webapp::url')}"
# e2e checks on deposit
swh::deploy::deposit::e2e::server: "%{hiera('swh::deploy::deposit::url')}/1"
swh::deploy::deposit::e2e::user: swh
swh::deploy::deposit::e2e::collection: swh
swh::deploy::deposit::e2e::provider_url: https://www.softwareheritage.org
swh::deploy::deposit::e2e::swh_web_url: "%{alias('swh::deploy::webapp::url')}"
swh::deploy::deposit::e2e::poll_interval: 1
swh::deploy::deposit::e2e::archive: /usr/share/swh/icinga-plugins/data/deposit/jesuisgpl.tgz
swh::deploy::deposit::e2e::metadata: /usr/share/swh/icinga-plugins/data/deposit/jesuisgpl.tgz.xml
swh::deploy::deposit::sentry_swh_package: swh.deposit
swh::deploy::deposit::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::deposit::sentry_dsn: "https://%{lookup('swh::deploy::deposit::sentry_token')}@sentry.softwareheritage.org/12"
swh::deploy::deposit::config_directory: "%{hiera('swh::conf_directory')}/deposit"
swh::deploy::deposit::config_file: "%{hiera('swh::deploy::deposit::config_directory')}/server.yml"
swh::deploy::deposit::user: swhdeposit
swh::deploy::deposit::group: swhdeposit
swh::deploy::deposit::media_root_directory: /srv/storage/space/swh-deposit/uploads/
swh::deploy::deposit::db::host: db.internal.softwareheritage.org
swh::deploy::deposit::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}"
swh::deploy::deposit::db::dbname: softwareheritage-deposit
swh::deploy::deposit::db::dbuser: swhstorage
swh::config::keycloak::realm_name: SoftwareHeritage
swh::deploy::deposit::config::keycloak:
server_url: "https://%{hiera('keycloak::vhost::name')}/auth/"
realm_name: "%{alias('swh::config::keycloak::realm_name')}"
swh::deploy::deposit::config::authentication:
authentication_provider: keycloak
keycloak: "%{alias('swh::deploy::deposit::config::keycloak')}"
cache_uri: "%{hiera('memcached::server::bind')}:%{hiera('memcached::server::port')}"
# swh::deploy::deposit::db::password: in private data
# swh::deploy::deposit::runtime_secret_key in private data
swh::deploy::deposit::config:
max_upload_size: 209715200
scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}"
# The backend server writes raw_extrinsic_metadata objects
storage_metadata: "%{alias('swh::remote_service::storage::config::writable')}"
  # It only reads some information from this storage, but uses the same config for consistency.
storage: "%{alias('swh::remote_service::storage::config::writable')}"
private:
secret_key: "%{hiera('swh::deploy::deposit::runtime_secret_key')}"
db:
host: "%{hiera('swh::deploy::deposit::db::host')}"
port: "%{hiera('swh::deploy::deposit::db::port')}"
name: "%{hiera('swh::deploy::deposit::db::dbname')}"
user: "%{hiera('swh::deploy::deposit::db::dbuser')}"
password: "%{hiera('swh::deploy::deposit::db::password')}"
media_root: "%{hiera('swh::deploy::deposit::media_root_directory')}"
extraction_dir: /tmp/swh-deposit/archive/
swh_authority_url: "%{hiera('swh::deploy::deposit::url')}/"
swh::deploy::worker::loader::max_content_size: 104857600
swh::deploy::worker::loader_deposit::config_file: "%{hiera('swh::conf_directory')}/loader_deposit.yml"
swh::deploy::worker::loader_deposit::concurrency: 1
swh::deploy::worker::loader_deposit::private_tmp: true
swh::deploy::worker::loader_deposit::loglevel: info
# deposit_basic_auth_swhworker_{username|password} in private_data
swh::deploy::worker::loader_deposit::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.package.deposit.tasks.LoadDeposit
deposit:
url: "%{alias('swh::deploy::webapp::deposit::private::url')}"
auth:
username: "%{hiera('deposit_basic_auth_swhworker_username')}"
password: "%{hiera('deposit_basic_auth_swhworker_password')}"
default_filename: archive.tar
swh::deploy::checker_deposit::sentry_swh_package: swh.deposit.loader
swh::deploy::checker_deposit::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::worker::checker_deposit::config_file: "%{hiera('swh::conf_directory')}/checker_deposit.yml"
swh::deploy::worker::checker_deposit::concurrency: 1
swh::deploy::worker::checker_deposit::private_tmp: true
swh::deploy::worker::checker_deposit::loglevel: info
# deposit_basic_auth_swhworker_{username|password} in private_data
swh::deploy::worker::checker_deposit::config:
storage: "%{alias('swh::remote_service::storage::config::writable')}"
extraction_dir: /tmp/swh.checker.deposit/
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_modules:
- swh.deposit.loader.tasks
task_queues:
- swh.deposit.loader.tasks.ChecksDepositTsk
deposit:
url: "%{hiera('swh::deploy::deposit::internal_url')}/1/private/"
auth:
username: "%{hiera('deposit_basic_auth_swhworker_username')}"
password: "%{hiera('deposit_basic_auth_swhworker_password')}"
swh::deploy::deposit::backend::listen::host: 127.0.0.1
swh::deploy::deposit::backend::listen::port: "%{alias('swh::remote_service::deposit::port')}"
swh::deploy::deposit::backend::workers: 8
swh::deploy::deposit::backend::reload_mercy: 3600
swh::deploy::deposit::backend::http_keepalive: 5
swh::deploy::deposit::backend::http_timeout: 3600
swh::deploy::objstorage_log_checker::conf_directory: "%{hiera('swh::deploy::objstorage::conf_directory')}"
swh::deploy::objstorage_log_checker::conf_file: "%{hiera('swh::deploy::objstorage_log_checker::conf_directory')}/log_checker.yml"
swh::deploy::objstorage_log_checker::user: "%{hiera('swh::deploy::objstorage::user')}"
swh::deploy::objstorage_log_checker::group: "%{hiera('swh::deploy::objstorage::group')}"
swh::deploy::objstorage_log_checker::config:
storage:
cls: pathslicing
root: "%{hiera('swh::deploy::objstorage::directory')}"
slicing: "%{hiera('swh::deploy::objstorage::slicing')}"
batch_size: 1000
log_tag: objstorage.checker.log
swh::deploy::objstorage_repair_checker::conf_directory: "%{hiera('swh::deploy::objstorage::conf_directory')}"
swh::deploy::objstorage_repair_checker::conf_file: "%{hiera('swh::deploy::objstorage_repair_checker::conf_directory')}/repair_checker.yml"
swh::deploy::objstorage_repair_checker::user: "%{hiera('swh::deploy::objstorage::user')}"
swh::deploy::objstorage_repair_checker::group: "%{hiera('swh::deploy::objstorage::group')}"
swh::deploy::objstorage_repair_checker::config:
storage:
cls: pathslicing
root: "%{hiera('swh::deploy::objstorage::directory')}"
slicing: "%{hiera('swh::deploy::objstorage::slicing')}"
batch_size: 1000
log_tag: objstorage.checker.repair
backup_storages: "%{alias('swh::remote_service::objstorage::config_as_dict')}"
swh::deploy::webapp::backported_packages:
stretch:
- python3-django
- python-django-common
buster:
- python3-django
- python3-typing-extensions
swh::deploy::deposit::backported_packages: "%{alias('swh::deploy::webapp::backported_packages')}"
swh::deploy::webapp::sentry_swh_package: swh.web
swh::deploy::webapp::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::webapp::sentry_dsn: "https://%{lookup('swh::deploy::webapp::sentry_token')}@sentry.softwareheritage.org/13"
swh::deploy::webapp::conf_directory: "%{hiera('swh::conf_directory')}/web"
swh::deploy::webapp::conf_file: "%{hiera('swh::deploy::webapp::conf_directory')}/web.yml"
swh::deploy::webapp::user: swhwebapp
swh::deploy::webapp::group: swhwebapp
swh::deploy::webapp::conf::log_dir: "%{hiera('swh::log_directory')}/webapp"
swh::deploy::webapp::backend::listen::host: 127.0.0.1
swh::deploy::webapp::backend::listen::port: "%{alias('swh::remote_service::webapp::port')}"
swh::deploy::webapp::backend::workers: 32
swh::deploy::webapp::backend::http_keepalive: 5
swh::deploy::webapp::backend::http_timeout: 3600
swh::deploy::webapp::backend::reload_mercy: 3600
swh::deploy::webapp::db::host: db.internal.softwareheritage.org
swh::deploy::webapp::db::user: swh-web
swh::deploy::webapp::db::name: swh-web
swh::deploy::webapp::db::port: "%{alias('swh::deploy::db::secondary::port')}"
# swh::deploy::webapp::db::password in private data
swh::deploy::webapp::production_db:
host: "%{alias('swh::deploy::webapp::db::host')}"
port: "%{alias('swh::deploy::db::pgbouncer::port')}"
name: "%{alias('swh::deploy::webapp::db::name')}"
user: "%{alias('swh::deploy::webapp::db::user')}"
password: "%{alias('swh::deploy::webapp::db::password')}"
swh::deploy::webapp::icinga_check_string: 'archive'
swh::deploy::webapp::reverse_proxy::backend_http_port: "%{alias('varnish::backend_http_port')}"
swh::deploy::webapp::django_settings_module: swh.web.settings.production
swh::deploy::webapp::timers_enabled: false
# aliases are pulled from letsencrypt::certificates[$swh::deploy::webapp::vhost::letsencrypt_cert]
swh::deploy::webapp::vhost::letsencrypt_cert: archive_production
swh::deploy::webapp::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
swh::deploy::webapp::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
swh::deploy::webapp::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
swh::deploy::webapp::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
swh::deploy::webapp::vhost::access_log_format: combined_with_duration
swh::deploy::webapp::inbound_email::production::domain: archive.softwareheritage.org
swh::deploy::webapp::inbound_email::staging::domain: webapp.staging.swh.network
swh::deploy::webapp::config::es_workers_index_url: http://esnode1.internal.softwareheritage.org:9200/swh_workers-*
swh::deploy::webapp::deposit::private::url: "%{hiera('swh::deploy::deposit::internal_url')}/1/private/"
swh::deploy::webapp::config::throttling:
cache_uri: "%{hiera('memcached::server::bind')}:%{hiera('memcached::server::port')}"
scopes:
swh_api:
limiter_rate:
default: 120/h
exempted_networks:
- 127.0.0.0/8
- 192.168.100.0/23
- 128.93.166.14
- 131.107.174.0/24
# OpenAIRE
- 213.135.60.145
- 213.135.60.146
# DINSIC
- 37.187.137.47
# Antoine Eiche
- 37.187.96.121
swh_api_origin_search:
limiter_rate:
default: 10/m
swh_api_origin_visit_latest:
# This endpoint gets called a lot (by default, up to 70 times
# per origin search), so it deserves a much higher rate-limit
# than the rest of the API.
limiter_rate:
default: 700/m
swh_vault_cooking:
limiter_rate:
default: 120/h
GET: 60/m
exempted_networks:
- 127.0.0.0/8
- 192.168.100.0/23
- 128.93.166.14
- 131.107.174.0/24
# OpenAIRE
- 213.135.60.145
- 213.135.60.146
# Antoine Eiche
- 37.187.96.121
swh_save_origin:
limiter_rate:
default: 120/h
POST: 10/h
exempted_networks:
- 127.0.0.0/8
- 192.168.100.0/23
- 128.93.166.14
- 131.107.174.0/24
# OpenAIRE
- 213.135.60.145
- 213.135.60.146
# Antoine Eiche
- 37.187.96.121
swh_raw_object:
limiter_rate:
default: 120/h
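# The limiter_rate values above use a "count/period" notation (h = hour,
# m = minute), with per-method keys (GET, POST) presumably overriding the
# default for that HTTP method. The 700/m limit on swh_api_origin_visit_latest
# presumably follows from the 10/m origin-search default combined with the
# "up to 70 calls per origin search" noted above:
#   10 searches/min * 70 calls/search = 700 calls/min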
swh::deploy::webapp::config::keycloak:
server_url: "https://%{hiera('keycloak::vhost::name')}/auth/"
realm_name: "%{alias('swh::config::keycloak::realm_name')}"
swh::deploy::webapp::config::swh_extra_django_apps:
- swh.web.add_forge_now
- swh.web.archive_coverage
- swh.web.badges
- swh.web.banners
- swh.web.deposit
- swh.web.inbound_email
- swh.web.jslicenses
- swh.web.mailmap
- swh.web.metrics
- swh.web.save_code_now
- swh.web.save_origin_webhooks
- swh.web.vault
swh::deploy::webapp::metadata_search_backend: swh-search
swh::deploy::webapp::history_counters_url: "%{lookup('swh::remote_service::counters::url')}counters_history/history.json"
swh::deploy::webapp::counters_backend: swh-counters
swh::deploy::webapp::instance_name: archive.softwareheritage.org
swh::deploy::webapp::add_forge_now_email_address: "add-forge-now@%{lookup('swh::deploy::webapp::inbound_email::domain')}"
# in private data:
# deposit_basic_auth_swhworker_username
# deposit_basic_auth_swhworker_password
# webapp_give_public_key
# webapp_give_token
swh::deploy::webapp::config:
search: "%{alias('swh::remote_service::search::config')}"
search_config:
metadata_backend: "%{alias('swh::deploy::webapp::metadata_search_backend')}"
storage: "%{alias('swh::remote_service::storage::config')}"
vault: "%{alias('swh::remote_service::vault::config::writable')}"
indexer_storage: "%{alias('swh::remote_service::indexer::config')}"
scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}"
counters: "%{alias('swh::remote_service::counters::config')}"
counters_backend: "%{alias('swh::deploy::webapp::counters_backend')}"
log_dir: "%{hiera('swh::deploy::webapp::conf::log_dir')}"
secret_key: "%{hiera('swh::deploy::webapp::conf::secret_key')}"
content_display_max_size: 5242880 # 5MB
throttling: "%{alias('swh::deploy::webapp::config::throttling')}"
production_db: "%{alias('swh::deploy::webapp::production_db')}"
es_workers_index_url: "%{alias('swh::deploy::webapp::config::es_workers_index_url')}"
deposit:
private_api_url: "%{hiera('swh::deploy::webapp::deposit::private::url')}"
private_api_user: "%{hiera('deposit_basic_auth_swhworker_username')}"
private_api_password: "%{hiera('deposit_basic_auth_swhworker_password')}"
client_config:
sentry_dsn: "%{lookup('swh::deploy::webapp::sentry_dsn')}"
keycloak: "%{alias('swh::deploy::webapp::config::keycloak')}"
history_counters_url: "%{alias('swh::deploy::webapp::history_counters_url')}"
instance_name: "%{alias('swh::deploy::webapp::instance_name')}"
give:
public_key: "%{hiera('webapp_give_public_key')}"
token: "%{hiera('webapp_give_token')}"
add_forge_now:
email_address: "%{hiera('swh::deploy::webapp::add_forge_now_email_address')}"
swh_extra_django_apps: "%{alias('swh::deploy::webapp::config::swh_extra_django_apps')}"
swh::deploy::webapp::locked_endpoints:
- /api/1/content/[^/]+/symbol/
- /api/1/entity/
- /api/1/provenance/
swh::deploy::webapp::sync_mailmaps::db::service_name: "softwareheritage-sync-mailmaps"
swh::deploy::webapp::sync_mailmaps::db::host: "%{hiera('swh::deploy::storage::db::host')}"
swh::deploy::webapp::sync_mailmaps::db::port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
swh::deploy::webapp::sync_mailmaps::db::dbname: "%{hiera('swh::deploy::storage::db::dbname')}"
swh::deploy::webapp::sync_mailmaps::db::user: swhmailmap
# swh::deploy::webapp::sync_mailmaps::db::password in private data
# regexp-style end-to-end check for the production instance;
# overridden in staging with its dedicated snapshot check
swh::deploy::webapp::snapshot_e2e:
uri: '/browse/snapshot/baebc2109e4a2ec22a1129a3859647e191d04df4/branches/'
regexp:
- 'buster/main/4.13.13-1.*buster/main/4.14.12-2.*buster/main/4.14.13-1.*buster/main/4.14.17-1.*buster/main/4.15.4-1.*buster/main/4.9.65-3.*experimental/main/4.10~rc6-1~exp2.*jessie-backports/main/3.16.39-1.*jessie-backports/main/4.7.8-1~bpo8\\+1.*jessie-backports/main/4.9.18-1~bpo8\\+1.*jessie-backports/main/4.9.65-3\\+deb9u1~bpo8\\+1.*jessie-backports/main/4.9.65-3\\+deb9u2~bpo8\\+1.*jessie-kfreebsd/main/3.16.7-ckt9-2.*jessie-proposed-updates/main/3.16.51-3.*jessie-proposed-updates/main/3.16.51-3\\+deb8u1.*jessie-updates/main/3.16.51-3.*jessie/main/3.16.43-1.*jessie/main/3.16.51-2.*jessie/main/3.16.7-ckt2-1.*jessie/main/3.16.7-ckt20-1\\+deb8u3'
swh::deploy::webapp::icinga_checks:
get:
counters:
uri: '/api/1/stat/counters/'
string: '"content":'
'content end to end':
uri: '/browse/content/4dfc4478b1d5f7388b298fdfc06802485bdeae0c/'
string: 'PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2'
post:
content_known:
uri: '/api/1/content/known/search/'
post: 'q=8624bcdae55baeef00cd11d5dfcfa60f68710a02'
string: '"found":true'
regexp:
'directory end to end':
uri: '/browse/directory/977fc4b98c0e85816348cebd3b12026407c368b6/'
regexp:
- 'Doc.*Grammar.*Include.*Lib.*Mac.*Misc.*Modules.*Objects.*PC.*PCbuild.*LICENSE.*README.rst'
'revision end to end':
uri: '/browse/revision/f1b94134a4b879bc55c3dacdb496690c8ebdc03f/'
regexp:
- 'Allocate the output vlc pictures with dimensions padded,.*'
- 'as requested by the decoder \\(for alignments\\).'
'revision log end to end':
uri: '/browse/revision/b9b0ecd1e2f9db10335383651f8317ed8cec8296/log/'
regexp:
- '/browse/revision/b9b0ecd1e2f9db10335383651f8317ed8cec8296/.*Roberto Di Cosmo.*Moved to github'
'release end to end':
uri: '/browse/release/a9b7e3f1eada90250a6b2ab2ef3e0a846cb16831/'
regexp:
- 'Linux 4.9-rc8.*'
- '/revision/3e5de27e940d00d8d504dfb96625fb654f641509/'
'snapshot end to end': "%{alias('swh::deploy::webapp::snapshot_e2e')}"
# local configuration for the scheduler
swh::deploy::scheduler::config::local: &swh_scheduler_local_config
scheduler:
cls: postgresql
db: "host=%{hiera('swh::deploy::scheduler::db::host')} port=%{hiera('swh::deploy::scheduler::db::port')} dbname=%{hiera('swh::deploy::scheduler::db::dbname')} user=%{hiera('swh::deploy::scheduler::db::user')} password=%{hiera('swh::deploy::scheduler::db::password')}"
swh::deploy::scheduler::sentry_swh_package: swh.scheduler
swh::deploy::scheduler::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::scheduler::sentry_dsn: "https://%{lookup('swh::deploy::scheduler::sentry_token')}@sentry.softwareheritage.org/7"
swh::deploy::scheduler::conf_dir: "%{lookup('swh::conf_directory')}/scheduler"
swh::deploy::scheduler::conf_file: "%{hiera('swh::deploy::scheduler::conf_dir')}/listener-runner.yml"
swh::deploy::scheduler::user: swhscheduler
swh::deploy::scheduler::group: swhscheduler
swh::deploy::scheduler::db::host: db.internal.softwareheritage.org
swh::deploy::scheduler::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}"
swh::deploy::scheduler::db::dbname: softwareheritage-scheduler
swh::deploy::scheduler::db::user: swhscheduler
# swh::deploy::scheduler::db::password in private data
# swh::deploy::scheduler::task_broker::password in private data
swh::deploy::scheduler::task_broker: "amqp://swhproducer:%{hiera('swh::deploy::scheduler::task_broker::password')}@rabbitmq:5672/%2f"
swh::deploy::scheduler::services::log_level: INFO
swh::deploy::scheduler::config:
<<: *swh_scheduler_local_config
celery:
task_broker: "%{alias('swh::deploy::scheduler::task_broker')}"
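# The "<<: *swh_scheduler_local_config" lines here and in the journal client
# config below are standard YAML merge keys: they splice the mapping anchored
# as &swh_scheduler_local_config into the enclosing mapping. The entry above
# is therefore equivalent to this expanded form (illustration only, commented
# out; the DSN is the one defined in the anchor):
#   swh::deploy::scheduler::config:
#     scheduler:
#       cls: postgresql
#       db: "host=... port=... dbname=... user=... password=..."
#     celery:
#       task_broker: "%{alias('swh::deploy::scheduler::task_broker')}"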
swh::deploy::scheduler::packages:
- python3-swh.lister
- python3-swh.loader.bzr
- python3-swh.loader.git
- python3-swh.loader.mercurial
- python3-swh.loader.svn
- python3-swh.loader.core
- python3-swh.scheduler
# subset limited to the save-code-now task types for the priority runner
swh::deploy::scheduler::swh-scheduler-runner-priority::config::task_types:
- load-bzr
- load-git
- load-svn
- load-archive-files
- load-hg
swh::deploy::scheduler::remote::sentry_swh_package: swh.scheduler
swh::deploy::scheduler::remote::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::scheduler::remote::sentry_dsn: "%{alias('swh::deploy::scheduler::sentry_dsn')}"
swh::deploy::scheduler::remote::conf_dir: "%{alias('swh::deploy::scheduler::conf_dir')}"
swh::deploy::scheduler::remote::conf_file: "%{hiera('swh::deploy::scheduler::remote::conf_dir')}/backend.yml"
swh::deploy::scheduler::remote::user: swhscheduler
swh::deploy::scheduler::remote::group: swhscheduler
swh::deploy::scheduler::remote::backend::listen::host: 127.0.0.1
swh::deploy::scheduler::remote::backend::listen::port: "%{alias('swh::remote_service::scheduler::port')}"
swh::deploy::scheduler::remote::backend::workers: 16
swh::deploy::scheduler::remote::backend::reload_mercy: 3600
swh::deploy::scheduler::remote::backend::http_keepalive: 5
swh::deploy::scheduler::remote::backend::http_timeout: 3600
swh::deploy::scheduler::remote::backend::max_requests: 10000
swh::deploy::scheduler::remote::backend::max_requests_jitter: 1000
swh::deploy::scheduler::remote::backend::server_names:
- "%{::swh_hostname.internal_fqdn}"
- "%{::hostname}"
- 127.0.0.1
- localhost
- "::1"
swh::deploy::scheduler::remote::config: "%{alias('swh::deploy::scheduler::config::local')}"
swh::elasticsearch::storage_nodes:
- host: esnode2.internal.softwareheritage.org
port: 9200
- host: esnode3.internal.softwareheritage.org
port: 9200
- host: esnode1.internal.softwareheritage.org
port: 9200
swh::elasticsearch::search_nodes:
- host: search-esnode4.internal.softwareheritage.org
port: 9200
- host: search-esnode5.internal.softwareheritage.org
port: 9200
- host: search-esnode6.internal.softwareheritage.org
port: 9200
swh::deploy::scheduler::journal_client::config_file: "%{lookup('swh::deploy::scheduler::conf_dir')}/journal-client.yml"
swh::deploy::scheduler::journal_client::user: "%{alias('swh::deploy::scheduler::user')}"
swh::deploy::scheduler::journal_client::group: "%{alias('swh::deploy::scheduler::group')}"
swh::deploy::scheduler::journal_client::config:
<<: *swh_scheduler_local_config
journal:
brokers: "%{alias('swh::deploy::journal::brokers')}"
group_id: swh.scheduler.journal_client
# Main lister configuration
swh::deploy::worker::lister::db::user: swh-lister
swh::deploy::worker::lister::db::name: swh-lister
swh::deploy::worker::lister::db::host: db.internal.softwareheritage.org
swh::deploy::worker::lister::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}"
# swh::deploy::lister::db::password in private data
# swh::deploy::worker::task_broker::password in private data
swh::deploy::worker::task_broker: "amqp://swhconsumer:%{hiera('swh::deploy::worker::task_broker::password')}@rabbitmq:5672/%2f"
swh::deploy::worker::instances: []
swh::deploy::loader_git::sentry_swh_package: swh.loader.git
swh::deploy::loader_git::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::loader_git::sentry_dsn: "https://%{lookup('swh::deploy::loader_git::sentry_token')}@sentry.softwareheritage.org/8"
swh::deploy::worker::loader_git::config_file: "%{hiera('swh::conf_directory')}/loader_git.yml"
swh::deploy::worker::loader_git::concurrency: 1
swh::deploy::worker::loader_git::max_tasks_per_child: 100
swh::deploy::worker::loader_git::temp_file_cutoff: 536870912 # 512 * 1024 * 1024
swh::deploy::worker::loader_git::loglevel: info
swh::deploy::worker::loader_git::load_metadata: true
swh::deploy::worker::loader_git::extra_config:
metadata_fetcher_credentials: "%{alias('swh::deploy::worker::lister::config::credentials')}"
swh::deploy::loader_git::queues:
- swh.loader.git.tasks.UpdateGitRepository
# loader-git-disk
- swh.loader.git.tasks.LoadDiskGitRepository
- swh.loader.git.tasks.UncompressAndLoadDiskGitRepository
swh::deploy::worker::loader_git::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
temp_file_cutoff: "%{alias('swh::deploy::worker::loader_git::temp_file_cutoff')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues: "%{alias('swh::deploy::loader_git::queues')}"
# high-priority loader (save-code-now, ...)
swh::deploy::loader_high_priority::sentry_swh_package: swh.loader.highpriority
swh::deploy::loader_high_priority::sentry_environment: "%{hiera('swh::deploy::environment')}"
swh::deploy::loader_high_priority::sentry_dsn: "https://%{lookup('swh::deploy::loader_high_priority::sentry_token')}@sentry.softwareheritage.org/26"
swh::deploy::worker::loader_high_priority::config_file: "%{hiera('swh::conf_directory')}/loader_high_priority.yml"
swh::deploy::worker::loader_high_priority::queues:
# git
- save_code_now:swh.loader.git.tasks.UpdateGitRepository
# mercurial
- save_code_now:swh.loader.mercurial.tasks.LoadMercurial
- save_code_now:swh.loader.mercurial.tasks.LoadArchiveMercurial
# bzr
- save_code_now:swh.loader.bzr.tasks.LoadBazaar
# svn
- save_code_now:swh.loader.svn.tasks.LoadSvnRepository
- save_code_now:swh.loader.svn.tasks.MountAndLoadSvnRepository
- save_code_now:swh.loader.svn.tasks.DumpMountAndLoadSvnRepository
# archives
- save_code_now:swh.loader.package.archive.tasks.LoadArchive
swh::deploy::worker::loader_high_priority::concurrency: 1
swh::deploy::worker::loader_high_priority::max_tasks_per_child: 1
swh::deploy::worker::loader_high_priority::loglevel: info
swh::deploy::worker::loader_high_priority::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_acks_late: true
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
"%{alias('swh::deploy::worker::loader_high_priority::queues')}"
# One-shot worker, deactivated by default (e.g. for first-time ingestion of forges with
# restrictive ingestion parallelism policies)
swh::deploy::worker::loader_oneshot::config_file: "%{hiera('swh::conf_directory')}/loader_oneshot.yml"
swh::deploy::worker::loader_oneshot::concurrency: 1
swh::deploy::worker::loader_oneshot::max_tasks_per_child: 100
swh::deploy::worker::loader_oneshot::loglevel: info
swh::deploy::worker::loader_oneshot::task_queues: []
swh::deploy::worker::loader_oneshot::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues: "%{alias('swh::deploy::worker::loader_oneshot::task_queues')}"
# for all loader packages
swh::deploy::loader_core::sentry_swh_package: swh.loader.core
swh::deploy::loader_core::sentry_environment: "%{hiera('swh::deploy::environment')}"
swh::deploy::loader_core::sentry_dsn: "https://%{lookup('swh::deploy::loader_core::sentry_token')}@sentry.softwareheritage.org/9"
swh::deploy::worker::loader_debian::config_file: "%{hiera('swh::conf_directory')}/loader_debian.yml"
swh::deploy::worker::loader_debian::private_tmp: true
swh::deploy::worker::loader_debian::concurrency: 1
swh::deploy::worker::loader_debian::loglevel: info
swh::deploy::worker::loader_debian::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.package.debian.tasks.LoadDebian
swh::deploy::worker::loader_archive::config_file: "%{hiera('swh::conf_directory')}/loader_archive.yml"
swh::deploy::worker::loader_archive::private_tmp: true
swh::deploy::worker::loader_archive::concurrency: 1
swh::deploy::worker::loader_archive::loglevel: info
swh::deploy::worker::loader_archive::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.package.archive.tasks.LoadArchive
swh::deploy::loader_bzr::sentry_swh_package: swh.loader.bzr
swh::deploy::loader_bzr::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::loader_bzr::sentry_dsn: "https://%{lookup('swh::deploy::loader_bzr::sentry_token')}@sentry.softwareheritage.org/22"
swh::deploy::worker::loader_bzr::config_file: "%{hiera('swh::conf_directory')}/loader_bzr.yml"
swh::deploy::worker::loader_bzr::concurrency: 1
swh::deploy::worker::loader_bzr::private_tmp: true
swh::deploy::worker::loader_bzr::loglevel: info
swh::deploy::worker::loader_bzr::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
clone_timeout_seconds: 7200
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.bzr.tasks.LoadBazaar
swh::deploy::worker::loader_cran::config_file: "%{hiera('swh::conf_directory')}/loader_cran.yml"
swh::deploy::worker::loader_cran::private_tmp: true
swh::deploy::worker::loader_cran::concurrency: 1
swh::deploy::worker::loader_cran::loglevel: info
swh::deploy::worker::loader_cran::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.package.cran.tasks.LoadCRAN
swh::deploy::loader_cvs::sentry_swh_package: swh.loader.cvs
swh::deploy::loader_cvs::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::loader_cvs::sentry_dsn: "https://%{lookup('swh::deploy::loader_cvs::sentry_token')}@sentry.softwareheritage.org/21"
swh::deploy::worker::loader_cvs::config_file: "%{hiera('swh::conf_directory')}/loader_cvs.yml"
swh::deploy::worker::loader_cvs::concurrency: 1
swh::deploy::worker::loader_cvs::private_tmp: true
swh::deploy::worker::loader_cvs::loglevel: info
swh::deploy::worker::loader_cvs::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.cvs.tasks.LoadCvsRepository
swh::deploy::worker::loader_nixguix::config_file: "%{hiera('swh::conf_directory')}/loader_nixguix.yml"
swh::deploy::worker::loader_nixguix::private_tmp: true
swh::deploy::worker::loader_nixguix::concurrency: 1
swh::deploy::worker::loader_nixguix::loglevel: info
swh::deploy::worker::loader_nixguix::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.package.nixguix.tasks.LoadNixguix
unsupported_file_extensions:
- patch
- iso
- whl
- gem
- pom
- msi
- pod
- png
- rock
- ttf
- jar
- c
- el
- rpm
- diff
swh::deploy::lister::queues:
- swh.lister.bitbucket.tasks.IncrementalBitBucketLister
- swh.lister.bitbucket.tasks.FullBitBucketRelister
- swh.lister.cgit.tasks.CGitListerTask
- swh.lister.cran.tasks.CRANListerTask
- swh.lister.debian.tasks.DebianListerTask
- swh.lister.gitea.tasks.IncrementalGiteaLister
- swh.lister.gitea.tasks.RangeGiteaLister
- swh.lister.gitea.tasks.FullGiteaRelister
- swh.lister.gitlab.tasks.IncrementalGitLabLister
- swh.lister.gitlab.tasks.RangeGitLabLister
- swh.lister.gitlab.tasks.FullGitLabRelister
- swh.lister.gnu.tasks.GNUListerTask
- swh.lister.launchpad.tasks.FullLaunchpadLister
- swh.lister.launchpad.tasks.IncrementalLaunchpadLister
- swh.lister.opam.tasks.OpamListerTask
- swh.lister.npm.tasks.NpmListerTask
- swh.lister.phabricator.tasks.FullPhabricatorLister
- swh.lister.pypi.tasks.PyPIListerTask
- swh.lister.sourceforge.tasks.FullSourceForgeLister
- swh.lister.sourceforge.tasks.IncrementalSourceForgeLister
- swh.lister.maven.tasks.FullMavenLister
- swh.lister.maven.tasks.IncrementalMavenLister
swh::deploy::lister::sentry_swh_package: swh.lister
swh::deploy::lister::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::lister::sentry_dsn: "https://%{lookup('swh::deploy::lister::sentry_token')}@sentry.softwareheritage.org/6"
swh::deploy::worker::lister::config_file: "%{hiera('swh::conf_directory')}/lister.yml"
swh::deploy::worker::lister::concurrency: 5
swh::deploy::worker::lister::loglevel: warning
swh::deploy::worker::lister::config:
storage: "%{alias('swh::remote_service::storage::config::writable')}"
scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues: "%{alias('swh::deploy::lister::queues')}"
credentials: "%{alias('swh::deploy::worker::lister::config::credentials')}"
swh::deploy::worker::loader_maven::config_file: "%{hiera('swh::conf_directory')}/loader_maven.yml"
swh::deploy::worker::loader_maven::concurrency: 1
swh::deploy::worker::loader_maven::loglevel: info
swh::deploy::worker::loader_maven::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.package.maven.tasks.LoadMaven
swh::deploy::loader_mercurial::sentry_swh_package: swh.loader.mercurial
swh::deploy::loader_mercurial::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::loader_mercurial::sentry_dsn: "https://%{lookup('swh::deploy::loader_mercurial::sentry_token')}@sentry.softwareheritage.org/10"
swh::deploy::worker::loader_mercurial::config_file: "%{hiera('swh::conf_directory')}/loader_mercurial.yml"
swh::deploy::worker::loader_mercurial::concurrency: 1
swh::deploy::worker::loader_mercurial::private_tmp: true
swh::deploy::worker::loader_mercurial::loglevel: info
swh::deploy::worker::loader_mercurial::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
clone_timeout_seconds: 7200
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.mercurial.tasks.LoadMercurial
- swh.loader.mercurial.tasks.LoadArchiveMercurial
swh::deploy::worker::loader_opam::user: swhworker
swh::deploy::worker::loader_opam::group: swhworker
swh::deploy::worker::opam::root_directory: /tmp/opam/
# Keep the opam.ocaml.org entry first, as it is the default repository
swh::deploy::worker::opam::default_instance::name: opam.ocaml.org
swh::deploy::worker::opam::default_instance::url: https://opam.ocaml.org
swh::deploy::worker::opam::instances:
coq.inria.fr: https://coq.inria.fr/opam/released
ocamlbench-repo: https://github.com/OCamlPro/ocamlbench-repo.git
swh::deploy::worker::loader_opam::config_file: "%{hiera('swh::conf_directory')}/loader_opam.yml"
swh::deploy::worker::loader_opam::concurrency: 1
swh::deploy::worker::loader_opam::private_tmp: false
swh::deploy::worker::loader_opam::loglevel: info
swh::deploy::worker::loader_opam::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.package.opam.tasks.LoadOpam
swh::deploy::worker::loader_pypi::config_file: "%{hiera('swh::conf_directory')}/loader_pypi.yml"
swh::deploy::worker::loader_pypi::concurrency: 1
swh::deploy::worker::loader_pypi::private_tmp: true
swh::deploy::worker::loader_pypi::loglevel: info
swh::deploy::worker::loader_pypi::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.package.pypi.tasks.LoadPyPI
swh::deploy::worker::loader_npm::config_file: "%{hiera('swh::conf_directory')}/loader_npm.yml"
swh::deploy::worker::loader_npm::concurrency: 1
swh::deploy::worker::loader_npm::private_tmp: true
swh::deploy::worker::loader_npm::loglevel: info
swh::deploy::worker::loader_npm::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.package.npm.tasks.LoadNpm
swh::deploy::loader_svn::sentry_swh_package: swh.loader.svn
swh::deploy::loader_svn::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::loader_svn::sentry_dsn: "https://%{lookup('swh::deploy::loader_svn::sentry_token')}@sentry.softwareheritage.org/14"
swh::deploy::worker::loader_svn::config_file: "%{hiera('swh::conf_directory')}/loader_svn.yml"
swh::deploy::worker::loader_svn::concurrency: 1
# one task per child so the fallback clean-up becomes more effective
swh::deploy::worker::loader_svn::max_tasks_per_child: 1
swh::deploy::worker::loader_svn::private_tmp: true
swh::deploy::worker::loader_svn::limit_no_file: 8192
swh::deploy::worker::loader_svn::loglevel: info
# Contains a password: in private data
swh::deploy::worker::loader_svn::config:
storage: "%{alias('swh::deploy::worker::storage::pipeline')}"
max_content_size: "%{alias('swh::deploy::worker::loader::max_content_size')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_queues:
- swh.loader.svn.tasks.LoadSvnRepository
- swh.loader.svn.tasks.MountAndLoadSvnRepository
- swh.loader.svn.tasks.DumpMountAndLoadSvnRepository
swh::deploy::base_indexer::config_directory: "%{hiera('swh::conf_directory')}/indexer"
# for all indexers
swh::deploy::indexer::sentry_swh_package: swh.indexer
swh::deploy::indexer::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::indexer::sentry_dsn: "https://%{lookup('swh::deploy::indexer::sentry_token')}@sentry.softwareheritage.org/5"
swh::deploy::indexer::user: swhworker
swh::deploy::indexer::group: swhworker
# To remove once the previous implementations' services are deployed
swh::deploy::worker::indexer_content_mimetype::config_file: "%{hiera('swh::conf_directory')}/indexer_content_mimetype.yml"
swh::deploy::worker::indexer_content_mimetype::concurrency: 1
swh::deploy::worker::indexer_content_mimetype::loglevel: info
swh::deploy::worker::indexer_content_mimetype::config: {}
swh::deploy::worker::indexer_fossology_license::config_file: "%{hiera('swh::conf_directory')}/indexer_fossology_license.yml"
swh::deploy::worker::indexer_fossology_license::concurrency: 1
swh::deploy::worker::indexer_fossology_license::loglevel: info
swh::deploy::worker::indexer_fossology_license::config: {}
swh::deploy::indexer_journal_client::content_mimetype::config_file: "content_mimetype.yml"
swh::deploy::indexer_journal_client::content_mimetype::loglevel: INFO
swh::deploy::indexer_journal_client::content_mimetype::journal_authentication: true
# Contains a password: in private data
swh::deploy::indexer_journal_client::content_mimetype::config:
# FIXME: Required by BaseIndexer class code, unused in this context though
scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}"
indexer_storage: "%{alias('swh::remote_service::indexer::config::writable')}"
objstorage: "%{alias('swh::remote_service::objstorage::config')}"
journal:
brokers: "%{alias('swh::deploy::journal::brokers')}"
group_id: swh.indexer.journal_client.content_mimetype
prefix: "%{alias('swh::deploy::journal::prefix')}"
tools:
name: file
version: 2:0.4.15-2
configuration:
type: library
debian-package: python3-magic
write_batch_size: 1000
swh::deploy::indexer_journal_client::content_fossology_license::config_file: "content_fossology_license.yml"
swh::deploy::indexer_journal_client::content_fossology_license::loglevel: INFO
swh::deploy::indexer_journal_client::content_fossology_license::journal_authentication: true
# Contains a password: in private data
swh::deploy::indexer_journal_client::content_fossology_license::config:
# FIXME: Required by BaseIndexer class code, unused in this context though
scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}"
indexer_storage: "%{alias('swh::remote_service::indexer::config::writable')}"
objstorage: "%{alias('swh::remote_service::objstorage::config')}"
journal:
brokers: "%{alias('swh::deploy::journal::brokers')}"
group_id: swh.indexer.journal_client.content_fossology_license
prefix: "%{alias('swh::deploy::journal::prefix')}"
workdir: /tmp/swh/indexer.fossology.license/
tools:
name: 'nomos'
version: '3.1-1~bpo9~swh+1'
configuration:
command_line: "nomossa <filepath>"
write_batch_size: 1000
swh::deploy::indexer_journal_client::extrinsic_metadata::config_file: "extrinsic_metadata.yml"
swh::deploy::indexer_journal_client::extrinsic_metadata::loglevel: INFO
swh::deploy::indexer_journal_client::extrinsic_metadata::journal_authentication: true
swh::deploy::indexer_journal_client::extrinsic_metadata::config:
# FIXME: Required by BaseIndexer class code, unused in this context though
scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}"
indexer_storage: "%{alias('swh::remote_service::indexer::config::writable')}"
objstorage: "%{alias('swh::remote_service::objstorage::config')}"
storage: "%{alias('swh::remote_service::storage::config')}"
journal:
brokers: "%{alias('swh::deploy::journal::brokers')}"
group_id: swh.indexer.journal_client.extrinsic_metadata
prefix: "%{alias('swh::deploy::journal::prefix')}"
tools:
name: swh-metadata-detector
version: 0.0.2
configuration: {}
swh::deploy::indexer_journal_client::origin_intrinsic_metadata::config_file: "origin_intrinsic_metadata.yml"
swh::deploy::indexer_journal_client::origin_intrinsic_metadata::batch_size: 200
swh::deploy::indexer_journal_client::origin_intrinsic_metadata::loglevel: INFO
swh::deploy::indexer_journal_client::origin_intrinsic_metadata::journal_authentication: true
swh::deploy::indexer_journal_client::origin_intrinsic_metadata::config:
# FIXME: Required by the BaseIndexer class code, though unused in this context
scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}"
indexer_storage: "%{alias('swh::remote_service::indexer::config::writable')}"
objstorage: "%{alias('swh::remote_service::objstorage::config')}"
storage: "%{alias('swh::remote_service::storage::config')}"
journal:
brokers: "%{alias('swh::deploy::journal::brokers')}"
group_id: swh.indexer.journal_client.origin_intrinsic_metadata
prefix: "%{alias('swh::deploy::journal::prefix')}"
batch_size: "%{alias('swh::deploy::indexer_journal_client::origin_intrinsic_metadata::batch_size')}"
tools:
name: swh-metadata-detector
version: 0.0.2
configuration: {}
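# Note on the interpolation used above: "%{alias('key')}" must be the only
# content of the string and resolves to the referenced key's value with its
# original data type (hash, array, ...), which is why the remote-service and
# journal configs are pulled in with alias(). "%{hiera('key')}" and
# "%{lookup('key')}" interpolate the value into a string instead. For a
# hypothetical key
#   example::objstorage::config:
#     cls: remote
#     url: http://objstorage.example.internal:5003/
# the line `objstorage: "%{alias('example::objstorage::config')}"` would
# resolve to that whole hash rather than to its string representation.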
swh::deploy::worker::indexer_rehash::config_file: "rehash.yml"
swh::deploy::worker::indexer_rehash::concurrency: 5
swh::deploy::worker::indexer_rehash::loglevel: info
# Contains a password: in private data
swh::deploy::worker::indexer_rehash::config:
storage: "%{alias('swh::remote_service::storage::config::writable')}"
objstorage: "%{alias('swh::remote_service::objstorage::config')}"
compute_checksums:
- blake2s256
batch_size_retrieve_content: 10000
batch_size_update: 5000
swh::deploy::worker::vault_cooker::config_file: "%{hiera('swh::conf_directory')}/vault_cooker.yml"
swh::deploy::worker::vault_cooker::concurrency: 20
swh::deploy::worker::vault_cooker::loglevel: info
swh::deploy::worker::vault_cooker::conf_file: "%{hiera('swh::conf_directory')}/vault/cooker.yml"
swh::deploy::worker::vault_cooker::config:
storage:
cls: retry
storage: "%{alias('swh::remote_service::storage::config')}"
vault: "%{alias('swh::remote_service::vault::config::writable')}"
celery:
task_broker: "%{alias('swh::deploy::worker::task_broker')}"
task_modules:
- swh.vault.cooking_tasks
task_queues:
- swh.vault.cooking_tasks.SWHCookingTask
- swh.vault.cooking_tasks.SWHBatchCookingTask
max_bundle_size: 1073741824 # 1GiB
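# The cooker's storage above is layered: the outer "retry" class is
# swh.storage's retrying proxy, wrapping the inner remote storage pulled in via
# alias(), so transient storage errors during cooking are retried. Illustrative
# shape once the alias is resolved (host and port are placeholders):
#   storage:
#     cls: retry
#     storage:
#       cls: remote
#       url: http://<storage-host>:<storage-port>/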
desktop::printers:
MFP_C:
uri: lpd://print.paris.inria.fr/MFP_C-pro
description: Impression couleur
location: Partout
ppd: "%{hiera('desktop::printers::ppd_dir')}/MFP_Paris.ppd"
ppd_options:
ColorType: Color
MFP:
uri: lpd://print.paris.inria.fr/MFP-pro
description: Impression Noir et Blanc
location: Partout
ppd: "%{hiera('desktop::printers::ppd_dir')}/MFP_Paris.ppd"
ppd_options:
ColorType: Mono
desktop::printers::default: MFP
desktop::printers::ppd_dir: /usr/share/ppd/softwareheritage
desktop::printers::cups_usernames:
ardumont: andumont
morane: mgruenpe
olasd: ndandrim
seirl: apietri
zack: zacchiro
icinga2::role: agent
icinga2::master::zonename: master
icinga2::master::db::username: icinga2
# icinga2::master::db::password in private data
icinga2::master::db::database: icinga2
icinga2::icingaweb2::db::username: icingaweb2
# icinga2::icingaweb2::db::password in private data
icinga2::icingaweb2::db::database: icingaweb2
icinga2::icingaweb2::protected_customvars:
- "*pw*"
- "*pass*"
- community
- http_auth_pair
# Must have a matching certificate in letsencrypt::certificates
icinga2::icingaweb2::vhost::name: icinga.softwareheritage.org
icinga2::icingaweb2::vhost::aliases:
- icinga.internal.softwareheritage.org
icinga2::icingaweb2::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
icinga2::icingaweb2::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
icinga2::icingaweb2::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
icinga2::icingaweb2::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
icinga2::parent_zone: master
icinga2::parent_endpoints:
pergamon.softwareheritage.org:
host: 192.168.100.29
icinga2::network: "%{lookup('internal_network')}"
icinga2::features:
- checker
- mainlog
icinga2::service_configuration:
load:
default:
load_wload1: 30
load_wload5: 28
load_wload15: 26
load_cload1: 50
load_cload5: 45
load_cload15: 40
sql:
load_wload1: 50
load_wload5: 40
load_wload15: 35
load_cload1: 70
load_cload5: 60
load_cload15: 50
high:
load_wload1: 140
load_wload5: 120
load_wload15: 100
load_cload1: 240
load_cload5: 220
load_cload15: 200
hypervisor:
load_wload1: 60
load_wload5: 50
load_wload15: 45
load_cload1: 120
load_cload5: 110
load_cload15: 100
icinga2::host::vars:
os: Linux
cores: "%{::processorcount}"
virtual_machine: "%{::is_virtual}"
distro: "%{::operatingsystem}"
disks:
'disk /':
disk_partitions: '/'
icinga2::disk::excludes:
- ^/srv/containers/
- ^/var/lib/docker/overlay2/
- ^/var/lib/docker/buildkit/
- ^/var/lib/docker/zfs/
- ^/var/lib/kubelet/
- ^/run/schroot/
- ^/run/k3s/
# Directory traversal is not allowed under the PostgreSQL data directories
- postgresql/.*/pg_wal
- postgres/.*/pg_wal
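# The entries above are regular expressions that the Icinga disk check
# (presumably) uses to exclude matching mount points and paths. For example, a
# container overlay mount like /var/lib/docker/overlay2/<id>/merged is excluded
# by ^/var/lib/docker/overlay2/, and a hypothetical WAL directory such as
# /srv/softwareheritage/postgres/<version>/main/pg_wal is excluded by
# postgres/.*/pg_wal.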
icinga2::apiusers:
root:
# password in private data
permissions:
- '*'
icinga2::exported_checks::filename: "/etc/icinga2/zones.d/%{hiera('icinga2::parent_zone')}/exported-checks.conf"
logstash_hosts:
- logstash.internal.softwareheritage.org:5044
systemd_journal::logstash_hosts: "%{alias('logstash_hosts')}"
filebeat::config:
filebeat.config.inputs:
enabled: true
path: inputs.d/*.yml
output:
logstash:
hosts: "%{alias('logstash_hosts')}"
memcached::server::bind: 127.0.0.1
memcached::server::port: 11211
memcached::server::max_memory: '5%'
mountpoints: {}
ceph::release: luminous
ceph::fsid: b3e34018-388e-499b-9579-d1c0d57e8c09
# needs to match the values of $::hostname on the ceph monitors
ceph::mon_initial_members:
- ceph-mon1
ceph::mon_host:
- 192.168.100.170
ceph::keys:
admin:
secret: "%{hiera('ceph::secrets::admin')}"
cap_mds: allow
cap_mgr: allow *
cap_mon: allow *
cap_osd: allow *
bootstrap-osd:
secret: "%{hiera('ceph::secrets::bootstrap_osd')}"
cap_mon: allow profile bootstrap-osd
proxmox-rbd:
secret: "%{hiera('ceph::secrets::proxmox_rbd')}"
cap_mon: profile rbd
cap_osd: profile rbd pool=rbd
swh-contents:
secret: "%{hiera('ceph::secrets::swh_contents')}"
cap_mon: allow r
cap_osd: allow r pool=swh_contents
swh-contents-rw:
secret: "%{hiera('ceph::secrets::swh_contents_rw')}"
cap_mon: allow r
cap_osd: allow rw pool=swh_contents
swh-contents-test:
secret: "%{hiera('ceph::secrets::swh_contents_test')}"
cap_mon: allow r
cap_osd: allow r pool=swh_contents_test
swh-contents-test-rw:
secret: "%{hiera('ceph::secrets::swh_contents_test_rw')}"
cap_mon: allow r
cap_osd: allow rw pool=swh_contents_test
ceph::default_client_keyring: /etc/softwareheritage/ceph-keyring
ceph::client_keyrings:
'/etc/softwareheritage/ceph-keyring':
owner: root
group: swhdev
mode: '0644'
keys:
- swh-contents
- swh-contents-test
nginx::package_name: nginx-light
nginx::accept_mutex: 'off'
nginx::names_hash_bucket_size: 128
nginx::names_hash_max_size: 1024
nginx::worker_processes: "%{::processorcount}"
nginx::metrics_port: 9081
nginx::metrics_location: '/metrics'
# prometheus::pve-exporter::password in credentials
prometheus::pve-exporter::user: pve_exporter@pve
prometheus::server::defaults_config:
web:
enable_admin_api: true
storage:
tsdb:
retention: '30d'
min-block-duration: '2h'
max-block-duration: '2h'
prometheus::server::config::global:
scrape_interval: 1m
scrape_timeout: 45s
external_labels:
tenant: "%{lookup('thanos::tenant')}"
replica: "%{lookup('thanos::replica')}"
prometheus::server::config::static_scrape_configs: []
prometheus::server::listen_network: "%{lookup('internal_network')}"
prometheus::server::listen_port: 9090
prometheus::server::certname: pergamon.softwareheritage.org
prometheus::server::fqdn: pergamon.internal.softwareheritage.org
swh::deploy::environment: production
prometheus::static_labels:
instance: "%{::swh_hostname.internal_fqdn}"
environment: "%{lookup('swh::deploy::environment')}"
prometheus::node::listen_network: "%{lookup('internal_network')}"
prometheus::node::listen_port: 9100
prometheus::node::textfile_directory: /var/lib/prometheus/node-exporter
prometheus::node::defaults_config:
collector:
diskstats:
ignored_devices: "^(ram|loop|fd|(h|s|v|xv)d[a-z]|nvme\\d+n\\d+p)\\d+$"
filesystem:
ignored_mount_points: "^/(sys|proc|dev|run|srv/softwareheritage/objects/[0-9a-f][0-9a-f])($|/)"
systemd: true
logind: true
loadavg: true
ntp: true
netstat: true
textfile:
directory: "%{lookup('prometheus::node::textfile_directory')}"
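# Worked examples for the two ignore patterns above: the diskstats regex drops
# per-partition devices such as sda1, vda2, xvda3, nvme0n1p1 or loop0 while
# keeping whole disks like sda or nvme0n1; the filesystem regex drops anything
# mounted under /sys, /proc, /dev and /run as well as the per-prefix object
# storage mounts /srv/softwareheritage/objects/00 through .../ff.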
prometheus::node::scripts::directory: /var/lib/prometheus/node-exporter-scripts
prometheus::node::scripts:
puppet-classes:
mode: cron
cron:
user: root
specification:
minute: fqdn_rand
apt:
mode: cron
cron:
user: root
specification:
minute: fqdn_rand
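# The literal value fqdn_rand is presumably expanded by the Puppet profile with
# fqdn_rand(), which returns a deterministic per-host pseudo-random number, so
# the two cron jobs above run at a different (but stable) minute on every host
# instead of all firing at the same time. For example, fqdn_rand(60) might
# yield 17 on one host and 43 on another (hypothetical values).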
prometheus::statsd::exporter::version: 0.22.5
prometheus::statsd::exporter::archive_sha256sum: b04a25fe937a2e74dab097d589bd1f4da9e874d62b166c4e74d5d55b0f58eab6
prometheus::statsd::listen_network: "%{lookup('internal_network')}"
prometheus::statsd::listen_port: 9102
prometheus::statsd::defaults_config: {}
prometheus::statsd::statsd_listen_tcp: 127.0.0.1:8125
prometheus::statsd::statsd_listen_udp: 127.0.0.1:8125
prometheus::statsd::mapping:
defaults:
timer_type: histogram
buckets:
- .005
- .01
- .025
- .05
- .1
- .25
- .5
- .75
- 1
- 2
- 5
- 10
- 15
- 30
- 45
- 60
- 120
- 300
- 600
- 900
- 1800
- 2700
- 3600
- 7200
mappings:
- match: "(.*_percent)"
name: "${1}"
match_type: regex
observer_type: histogram
histogram_options:
buckets:
- 0.0
- 0.05
- 0.1
- 0.15
- 0.2
- 0.25
- 0.3
- 0.35
- 0.4
- 0.45
- 0.5
- 0.55
- 0.6
- 0.65
- 0.7
- 0.75
- 0.8
- 0.85
- 0.9
- 0.95
- 1.
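# Worked example for the statsd mapping above: a timer whose name ends in
# _percent, say swh_objstorage_cache_hit_percent (hypothetical), matches the
# regex "(.*_percent)", keeps its name via "${1}" and is exported as a
# histogram with the 0.0 to 1.0 buckets; any other timer falls back to the
# "defaults" section and gets the seconds-scale buckets (5 ms up to 2 h).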
prometheus::sql::listen_network: "%{lookup('internal_network')}"
prometheus::sql::listen_port: 9237
prometheus::sql::config_snippets:
- activity
- queries
- replication
- wal
prometheus::jmx::version: 0.17.2
prometheus::kafka::listen_network: "%{lookup('internal_network')}"
prometheus::kafka::listen_port: 7071
prometheus::kafka_consumer_group::listen_network: "%{lookup('internal_network')}"
prometheus::kafka_consumer_group::base_port: 9208
prometheus::rabbitmq::listen_network: "%{lookup('internal_network')}"
prometheus::rabbitmq::listen_port: 9419
# Include first, then skip
prometheus::rabbitmq::include_vhost: .*
prometheus::rabbitmq::skip_vhost: ^$
prometheus::rabbitmq::include_queues: .*
prometheus::rabbitmq::skip_queues: ^(.*\.pidbox|amq\.gen.*|.*\.tasks\.ping)$
prometheus::rabbitmq::rabbit_capabilities:
- bert
- no_sort
prometheus::rabbitmq::rabbit_exporters:
- exchange
- node
- queue
prometheus::rabbitmq::rabbit_timeout: 30
prometheus::rabbitmq::exclude_metrics: []
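# Effect of the queue filters above ("include first, then skip"): all vhosts
# and queues are included, then Celery control and reply queues are dropped.
# For example (hypothetical names), worker01.celery.pidbox and amq.gen-JzTY1...
# are skipped, as is any *.tasks.ping queue, while regular task queues such as
# swh.loader.git.tasks.UpdateGitRepository keep being exported.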
prometheus::nginx::listen_network: "%{lookup('internal_network')}"
prometheus::nginx::listen_port: 9103
prometheus::varnish::listen_network: "%{lookup('internal_network')}"
prometheus::varnish::listen_port: 9104
thanos::base::config_dir: "/etc/thanos"
thanos::release::version: 0.28.0
thanos::release::digest: 5d636aed85b5e060cbd5f331b7f97389683f0146e9091384781aeadf36b8e8cc
thanos::release::digest_type: sha256
thanos::port::http: 19191
thanos::port::grpc: 19090
thanos::sidecar::port_http: "%{lookup('thanos::port::http')}"
thanos::sidecar::port_grpc: "%{lookup('thanos::port::grpc')}"
thanos::query::port_http: "%{lookup('thanos::port::http')}"
thanos::query::config_filepath: "%{lookup('thanos::base::config_dir')}/query-sd.yaml"
thanos::tenant: "%{::subnet}"
thanos::replica: "0"
thanos::objstore::azure_account: swhthanosmetrics
# thanos::objstore::azure_account_key in credentials
thanos::objstore::config:
type: AZURE
config:
storage_account: "%{lookup('thanos::objstore::azure_account')}"
storage_account_key: "%{lookup('thanos::objstore::azure_account_key')}"
container: "metrics-%{lookup('thanos::tenant')}-%{lookup('thanos::replica')}"
# Other Puppet-managed stores are added automatically
thanos::query::non_puppet_managed::stores:
- mmca-thanos.softwareheritage.org:443
- k8s-admin-thanos.internal.admin.swh.network:443
- k8s-archive-staging-thanos.internal.staging.swh.network:443
- k8s-archive-production-thanos.internal.softwareheritage.org:443
+ - k8s-archive-production-rke2-thanos.internal.softwareheritage.org:443
- k8s-gitlab-staging-thanos.internal.staging.swh.network:443
- k8s-gitlab-production-thanos.euwest.azure.internal.softwareheritage.org:443
- k8s-rancher-thanos.euwest.azure.internal.softwareheritage.org:443
thanos::stores:
historical:
azure-storage-container: metrics-historical-data-0
store:
port-http: 19193
port-grpc: 19093
compact:
port-http: 19293
mmca:
azure-storage-container: metrics-mmca-0
store:
port-http: 19194
port-grpc: 19094
compact:
port-http: 19294
archive-staging: # rancher cluster
azure-storage-container: metrics-sesi-rocquencourt-rancher-staging-0
store:
port-http: 19195
port-grpc: 19095
compact:
port-http: 19295
archive-production: # rancher cluster
azure-storage-container: metrics-sesi-rocquencourt-rancher-production-0
store:
port-http: 19196
port-grpc: 19096
compact:
port-http: 19296
k8s-admin: # rancher cluster
azure-storage-container: metrics-sesi-rocquencourt-rancher-admin-0
store:
port-http: 19197
port-grpc: 19097
compact:
port-http: 19297
k8s-gitlab-staging: # AKS cluster
azure-storage-container: metrics-euwest-gitlab-staging-0
store:
port-http: 19198
port-grpc: 19098
compact:
port-http: 19298
k8s-gitlab-production: # AKS cluster
azure-storage-container: metrics-euwest-gitlab-production-0
store:
port-http: 19199
port-grpc: 19099
compact:
port-http: 19299
k8s-rancher: # AKS cluster
azure-storage-container: metrics-euwest-rancher-0
store:
port-http: 19200
port-grpc: 19100
compact:
port-http: 19300
+ archive-production-rke2: # rancher cluster
+ azure-storage-container: metrics-sesi-rocquencourt-rancher-production-rke2-0
+ store:
+ port-http: 19201
+ port-grpc: 19101
+ compact:
+ port-http: 19301
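# Each thanos::stores entry above presumably drives one "thanos store" plus one
# "thanos compact" instance reading the named Azure container, with per-entry
# ports so the instances do not collide. A rough sketch of the new
# archive-production-rke2 store, assuming the profile maps the keys onto the
# standard Thanos flags (config file path hypothetical):
#   thanos store \
#     --objstore.config-file=/etc/thanos/objstore-archive-production-rke2.yaml \
#     --http-address=0.0.0.0:19201 \
#     --grpc-address=0.0.0.0:19101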
grafana::db::database: grafana
grafana::db::username: grafana
# grafana::db::password in private-data
grafana::backend::port: 3000
# Must have a matching certificate in letsencrypt::certificates
grafana::vhost::name: grafana.softwareheritage.org
grafana::config:
app_mode: production
server:
root_url: "https://%{lookup('grafana::vhost::name')}/"
http_port: "%{alias('grafana::backend::port')}"
users:
allow_sign_up: false
auth.anonymous:
enabled: true
org_name: Software Heritage
org_role: Viewer
smtp:
enabled: true
skip_verify: true
from_address: grafana@softwareheritage.org
grafana::objects::organizations:
- name: Software Heritage
id: 1
grafana::objects::users: []
grafana::objects::datasources:
- name: Prometheus (Pergamon)
url: "http://pergamon.internal.softwareheritage.org:%{hiera('prometheus::server::listen_port')}"
type: prometheus
organization: 1
access_mode: proxy
is_default: true
java::distribution: jre
jenkins::backend::url: http://thyssen.internal.softwareheritage.org:8080/
jenkins::vhost::name: jenkins.softwareheritage.org
jenkins::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
jenkins::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
jenkins::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
jenkins::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
jenkins::agent::jar_url: "https://%{hiera('jenkins::vhost::name')}/jnlpJars/agent.jar"
jenkins::agent::name: "%{::swh_hostname.internal_fqdn}"
# jenkins::agent::jnlp::token in private_data
weekly_report_bot::user: nobody
weekly_report_bot::cron:
minute: 0
hour: 12
weekday: fri
monthly_report_bot::user: nobody
monthly_report_bot::cron:
minute: 0
hour: 0
monthday: 1
weekly_planning_bot::user: nobody
weekly_planning_bot::cron:
minute: 42
hour: 13
weekday: fri
swh::postgres::service::users:
- root
- zack
- ardumont
swh::postgres::service::dbs:
- alias: admin-swh
name: "%{hiera('swh::deploy::storage::db::dbname')}"
host: "%{hiera('swh::deploy::storage::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: "%{hiera('swh::deploy::storage::db::user')}"
password: "%{hiera('swh::deploy::storage::db::password')}"
- alias: admin-swh-deposit
name: "%{hiera('swh::deploy::deposit::db::dbname')}"
host: "%{hiera('swh::deploy::deposit::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: "%{hiera('swh::deploy::deposit::db::dbuser')}"
password: "%{hiera('swh::deploy::deposit::db::password')}"
- alias: admin-swh-scheduler
name: "%{hiera('swh::deploy::scheduler::db::dbname')}"
host: "%{hiera('swh::deploy::scheduler::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: "%{hiera('swh::deploy::scheduler::db::user')}"
password: "%{hiera('swh::deploy::scheduler::db::password')}"
- alias: admin-swh-vault
name: "%{hiera('swh::deploy::vault::db::dbname')}"
host: "%{hiera('swh::deploy::vault::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: "%{hiera('swh::deploy::vault::db::user')}"
password: "%{hiera('swh::deploy::vault::db::password')}"
- alias: admin-swh-lister
name: "%{hiera('swh::deploy::worker::lister::db::name')}"
host: "%{hiera('swh::deploy::worker::lister::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: "%{hiera('swh::deploy::worker::lister::db::name')}"
password: "%{hiera('swh::deploy::lister::db::password')}"
- alias: admin-swh-replica
name: "%{hiera('swh::deploy::storage::db::dbname')}"
host: somerset.internal.softwareheritage.org
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: "%{hiera('swh::deploy::db::pgbouncer::user::login')}"
password: "%{hiera('swh::deploy::storage::db::password')}"
- alias: admin-swh-indexer
name: "%{hiera('swh::deploy::indexer::storage::db::dbname')}"
host: "%{hiera('swh::deploy::indexer::storage::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: "%{hiera('swh::deploy::indexer::storage::db::user')}"
password: "%{hiera('swh::deploy::indexer::storage::db::password')}"
- alias: admin-swh-web
name: "%{hiera('swh::deploy::webapp::db::name')}"
host: "%{hiera('swh::deploy::webapp::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: "%{hiera('swh::deploy::webapp::db::user')}"
password: "%{hiera('swh::deploy::webapp::db::password')}"
- alias: swh
name: "%{hiera('swh::deploy::storage::db::dbname')}"
host: "%{hiera('swh::deploy::storage::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: guest
- alias: swh-deposit
name: "%{hiera('swh::deploy::deposit::db::dbname')}"
host: "%{hiera('swh::deploy::deposit::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: guest
- alias: swh-scheduler
name: "%{hiera('swh::deploy::scheduler::db::dbname')}"
host: "%{hiera('swh::deploy::scheduler::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: guest
- alias: swh-vault
name: "%{hiera('swh::deploy::vault::db::dbname')}"
host: "%{hiera('swh::deploy::vault::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: guest
- alias: swh-lister
name: "%{hiera('swh::deploy::worker::lister::db::name')}"
host: "%{hiera('swh::deploy::worker::lister::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: guest
- alias: swh-replica
name: "%{hiera('swh::deploy::storage::db::dbname')}"
host: somerset.internal.softwareheritage.org
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: guest
- alias: swh-indexer
name: "%{hiera('swh::deploy::indexer::storage::db::dbname')}"
host: "%{hiera('swh::deploy::indexer::storage::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: guest
- alias: swh-web
name: "%{hiera('swh::deploy::webapp::db::name')}"
host: "%{hiera('swh::deploy::webapp::db::host')}"
port: "%{hiera('swh::deploy::db::pgbouncer::port')}"
user: guest
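# The dbs above are presumably rendered into a PostgreSQL connection service
# file (~/.pg_service.conf) for the users listed in
# swh::postgres::service::users, so that e.g. `psql service=swh-replica` opens
# a guest session on the replica. Illustrative entry, with the Hiera lookups
# left as placeholders:
#   [swh-replica]
#   host=somerset.internal.softwareheritage.org
#   port=<swh::deploy::db::pgbouncer::port>
#   dbname=<swh::deploy::storage::db::dbname>
#   user=guest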
elastic::elk_version: '7.15.2'
elasticsearch::hosts:
- http://esnode1.internal.softwareheritage.org:9200
- http://esnode2.internal.softwareheritage.org:9200
- http://esnode3.internal.softwareheritage.org:9200
elasticsearch::jvm_options:
- "-Xms%{lookup('elasticsearch::jvm_options::heap_size')}"
- "-Xmx%{lookup('elasticsearch::jvm_options::heap_size')}"
elasticsearch::config::path::data: /srv/elasticsearch
elasticsearch::config::path::logs: /var/log/elasticsearch
elasticsearch::config::http::port: 9200
elasticsearch::config::prometheus::indices: false
elasticsearch::config:
cluster.name: "%{alias('elasticsearch::config::cluster::name')}"
node.name: "%{::hostname}"
discovery.seed_hosts: "%{alias('elasticsearch::config::discovery::seed_hosts')}"
cluster.initial_master_nodes: "%{alias('elasticsearch::config::cluster::initial_master_nodes')}"
path.data: "%{alias('elasticsearch::config::path::data')}"
path.logs: "%{alias('elasticsearch::config::path::logs')}"
http.port: "%{alias('elasticsearch::config::http::port')}"
prometheus.indices: "%{alias('elasticsearch::config::prometheus::indices')}"
indices.memory.index_buffer_size: 50%
index.store.type: hybridfs
logstash::listen_network: "%{lookup('internal_network')}"
logstash::elasticsearch::hosts: "%{alias('elasticsearch::hosts')}"
kibana::listen_network: "%{lookup('internal_network')}"
kibana::server_name: "%{::swh_hostname.internal_fqdn}"
kibana::config:
server.name: "%{alias('kibana::server_name')}"
elasticsearch.hosts: "%{alias('elasticsearch::hosts')}"
kibana.index: .kibana
# puppet-module-keycloak has some issues with Keycloak 11.x, so we stick to 10.x
# until they are resolved (https://github.com/treydock/puppet-module-keycloak/pull/154)
keycloak::version: 10.0.2
keycloak::swh_theme::repo_url: https://forge.softwareheritage.org/source/swh-keycloak-theme.git
keycloak::swh_theme::tag: v0.3.1
keycloak::vhost::name: auth.softwareheritage.org
keycloak::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
keycloak::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
keycloak::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
keycloak::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
keycloak::backend::port: 8080
keycloak::backend::url: "http://kelvingrove.internal.softwareheritage.org:%{lookup('keycloak::backend::port')}/"
keycloak::admin::user: keycloak-admin
# keycloak::admin::password in private-data
keycloak::postgres::host: db1.internal.admin.swh.network
keycloak::postgres::port: 5432
keycloak::postgres::dbname: keycloak
keycloak::postgres::user: keycloak
# keycloak::postgres::password in private-data
keycloak::resources::realms::common_settings:
remember_me: true
login_with_email_allowed: true
internationalization_enabled: true
supported_locales:
- en
account_theme: swh
admin_theme: swh
login_theme: swh
smtp_server_host: localhost
smtp_server_from: noreply@softwareheritage.org
smtp_server_from_display_name: Software Heritage Authentication Service
brute_force_protected: true
# set OIDC refresh token expiration to one week (avoids frequent relogins with credentials)
sso_session_idle_timeout: 604800
# set OIDC session max duration to one month
sso_session_max_lifespan: 2592000
keycloak::resources::clients::common_settings:
public_client: true
login_theme: swh
default_client_scopes:
- profile
- email
- roles
- web-origins
optional_client_scopes:
- microprofile-jwt
- offline_access
keycloak::resources::clients::swh_web:::roles:
- swh.web.api.throttling_exempted
- swh.web.api.graph
- swh.vault.git_bare.ui
- swh.web.api.save_origin
- swh.web.admin.list_deposits
- swh.web.mailmap
- swh.web.search_ql
- swh.web.add_forge_now.moderator
- swh.web.admin.mailmap
- swh.web.api.raw_object
keycloak::resources::clients::swh_deposit::roles:
- swh.deposit.api
keycloak::resources::protocol_mappers::audience:
resource_name: audience
type: oidc-audience-mapper
included_client_audience: __client_id__
keycloak::resources::protocol_mappers::groups:
resource_name: groups
type: oidc-group-membership-mapper
claim_name: groups
full_path: true
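# What the two mappers above add to issued tokens: the audience mapper puts the
# client id into the token's "aud" claim (__client_id__ is presumably
# substituted with each client's own id), and the group-membership mapper adds
# a "groups" claim with full group paths. Illustratively, a token issued to
# swh-web would then carry:
#   "aud": ["swh-web"],
#   "groups": ["/staff"]
# (the group name is hypothetical).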
keycloak::resources::realms:
master:
settings:
display_name: master
SoftwareHeritage:
settings:
display_name: Software Heritage
registration_allowed: true
reset_password_allowed: true
verify_email: true
roles:
- offline_access
- uma_authorization
- swh.ambassador
clients:
swh-web:
settings:
redirect_uris:
# Should match letsencrypt::certificates.archive_production.domains
- https://archive.softwareheritage.org/*
- https://base.softwareheritage.org/*
- https://archive.internal.softwareheritage.org/*
roles: "%{alias('keycloak::resources::clients::swh_web:::roles')}"
protocol_mappers:
- "%{alias('keycloak::resources::protocol_mappers::audience')}"
- "%{alias('keycloak::resources::protocol_mappers::groups')}"
swh-deposit:
settings:
redirect_uris:
# Should match letsencrypt::certificates.archive_staging.domains
- https://deposit.softwareheritage.org/*
- https://deposit.internal.softwareheritage.org/*
roles: "%{alias('keycloak::resources::clients::swh_deposit::roles')}"
protocol_mappers:
- "%{alias('keycloak::resources::protocol_mappers::audience')}"
- "%{alias('keycloak::resources::protocol_mappers::groups')}"
SoftwareHeritageStaging:
settings:
display_name: Software Heritage (Staging)
registration_allowed: true
reset_password_allowed: true
verify_email: true
roles:
- offline_access
- uma_authorization
- swh.ambassador
clients:
swh-web:
settings:
redirect_uris:
# Should match letsencrypt::certificates.archive_staging.domains
- https://webapp.staging.swh.network/*
- https://webapp.internal.staging.swh.network/*
roles: "%{alias('keycloak::resources::clients::swh_web:::roles')}"
protocol_mappers:
- "%{alias('keycloak::resources::protocol_mappers::audience')}"
- "%{alias('keycloak::resources::protocol_mappers::groups')}"
swh-deposit:
settings:
redirect_uris:
# Should match letsencrypt::certificates.archive_staging.domains
- https://deposit.staging.swh.network/*
- https://deposit.internal.staging.swh.network/*
roles: "%{alias('keycloak::resources::clients::swh_deposit::roles')}"
protocol_mappers:
- "%{alias('keycloak::resources::protocol_mappers::audience')}"
- "%{alias('keycloak::resources::protocol_mappers::groups')}"
borg::repository_user: borg
borg::repository_group: borg
borg::base_path: /srv/borg
borg::repository_path: "%{lookup('borg::base_path')}/repositories"
borg::repository_server: banco.internal.softwareheritage.org
borg::encryption: repokey-blake2
swh::deploy::base_counters::config_directory: "%{hiera('swh::conf_directory')}/counters"
swh::deploy::base_counters::user: swhstorage
swh::deploy::base_counters::group: swhstorage
swh::deploy::counters::conf_file: "%{hiera('swh::deploy::base_counters::config_directory')}/server.yml"
swh::deploy::counters::sentry_swh_package: swh.counters
swh::deploy::counters::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::counters::sentry_dsn: "https://%{lookup('swh::deploy::counters::sentry_token')}@sentry.softwareheritage.org/19"
swh::deploy::counters::user: "%{alias('swh::deploy::base_counters::user')}"
swh::deploy::counters::group: "%{alias('swh::deploy::base_counters::group')}"
swh::deploy::counters::cache_directory: /srv/softwareheritage/counters
swh::deploy::counters::refresh_cache::activate: true
swh::deploy::counters::history_interval: 12h
swh::deploy::counters::live_data_start: 1618415227 # last point of the historical file
swh::deploy::counters::cache_static_file: static_history.json
swh::deploy::counters::refresh_cache::cron:
minute: 0
hour: "*/4"
swh::deploy::counters::backend::listen::host: 0.0.0.0
swh::deploy::counters::backend::listen::port: "%{alias('swh::remote_service::counters::port')}"
swh::deploy::counters::backend::workers: 2
swh::deploy::counters::backend::reload_mercy: 3600
swh::deploy::counters::backend::http_keepalive: 5
swh::deploy::counters::backend::http_timeout: 3600
swh::deploy::counters::backend::max_requests: 10000
swh::deploy::counters::backend::max_requests_jitter: 1000
swh::deploy::counters::backend::server_names:
- "%{::swh_hostname.internal_fqdn}"
- "%{::hostname}"
- 127.0.0.1
- localhost
- "::1"
swh::deploy::counters::config:
counters:
cls: redis
host: localhost:6379
history:
cls: prometheus
prometheus_host: thanos.internal.admin.swh.network
prometheus_port: 19191
live_data_start: "%{alias('swh::deploy::counters::live_data_start')}"
cache_base_directory: "%{alias('swh::deploy::counters::cache_directory')}"
interval: "%{alias('swh::deploy::counters::history_interval')}"
labels:
environment: "%{alias('swh::deploy::environment')}"
swh::deploy::counters::journal_client::config_file: "%{lookup('swh::deploy::base_counters::config_directory')}/journal_client.yml"
swh::deploy::counters::journal_client::config:
counters:
cls: remote
url: http://localhost:5011
journal:
brokers: "%{alias('swh::deploy::journal::brokers')}"
group_id: swh.counters.journal_client
prefix: swh.journal.objects
object_types:
- content
- directory
- origin
- origin_visit
- origin_visit_status
- release
- revision
- skipped_content
- snapshot
message.max.bytes: 524288000
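# message.max.bytes is presumably passed through to the underlying Kafka client
# configuration and raised to 524288000 bytes (500 MiB) to accommodate large
# journal messages. The journal client tallies the object types listed above
# and reports the counts to the local counters RPC endpoint
# (http://localhost:5011), which updates the Redis-backed counters defined
# earlier in swh::deploy::counters::config.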
swh::deploy::base_search::config_directory: "%{hiera('swh::conf_directory')}/search"
swh::deploy::base_search::user: swhstorage
swh::deploy::base_search::group: swhstorage
swh::deploy::search::sentry_swh_package: swh.search
swh::deploy::search::sentry_environment: "%{alias('swh::deploy::environment')}"
swh::deploy::search::sentry_dsn: "https://%{lookup('swh::deploy::search::sentry_token')}@sentry.softwareheritage.org/15"
swh::deploy::search::conf_file: "%{hiera('swh::deploy::base_search::config_directory')}/server.yml"
swh::deploy::search::user: "%{alias('swh::deploy::base_search::user')}"
swh::deploy::search::group: "%{alias('swh::deploy::base_search::group')}"
swh::deploy::search::index: origin-v0.11
swh::deploy::search::read_alias: origin-read
swh::deploy::search::write_alias: origin-write
swh::deploy::search::config:
search:
cls: elasticsearch
hosts: "%{alias('swh::elasticsearch::search_nodes')}"
indexes:
origin:
index: "%{alias('swh::deploy::search::index')}"
read_alias: "%{alias('swh::deploy::search::read_alias')}"
write_alias: "%{alias('swh::deploy::search::write_alias')}"
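# Reads go through the origin-read alias and writes through origin-write, both
# pointing at the versioned index origin-v0.11; a new index version can then be
# populated and the aliases switched without reconfiguring clients.
# Illustrative alias flip in Elasticsearch (origin-v0.12 is hypothetical):
#   POST /_aliases
#   { "actions": [
#       { "remove": { "index": "origin-v0.11", "alias": "origin-read" } },
#       { "add":    { "index": "origin-v0.12", "alias": "origin-read" } } ] }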
swh::deploy::search::journal_client::service_types:
- objects
- indexed
swh::deploy::search::journal_client::objects::config_file: "%{lookup('swh::deploy::base_search::config_directory')}/journal_client_objects.yml"
swh::deploy::search::journal_client::objects::consumer_group: swh.search.journal_client-v0.11
swh::deploy::search::journal_client::objects::config:
search: "%{alias('swh::remote_service::search::config')}"
journal:
brokers: "%{alias('swh::deploy::journal::brokers')}"
group_id: "%{alias('swh::deploy::search::journal_client::objects::consumer_group')}"
prefix: swh.journal.objects
object_types:
- origin
- origin_visit_status
storage: "%{alias('swh::remote_service::storage::config')}"
swh::deploy::search::journal_client::indexed::config_file: "%{lookup('swh::deploy::base_search::config_directory')}/journal_client_indexed.yml"
swh::deploy::search::journal_client::indexed::consumer_group: swh.search.journal_client.indexed-v0.11
swh::deploy::search::journal_client::indexed::config:
search: "%{alias('swh::remote_service::search::config')}"
journal:
brokers: "%{alias('swh::deploy::journal::brokers')}"
group_id: "%{alias('swh::deploy::search::journal_client::indexed::consumer_group')}"
prefix: swh.journal.indexed
object_types:
- origin_intrinsic_metadata
- origin_extrinsic_metadata
storage: "%{alias('swh::remote_service::storage::config')}"
swh::deploy::search::backend::listen::host: "%{::fqdn}"
swh::deploy::search::backend::listen::port: "%{alias('swh::remote_service::search::port')}"
swh::deploy::search::backend::workers: 4
swh::deploy::search::backend::reload_mercy: 3600
swh::deploy::search::backend::http_keepalive: 5
swh::deploy::search::backend::http_timeout: 3600
swh::deploy::search::backend::max_requests: 10000
swh::deploy::search::backend::max_requests_jitter: 1000
swh::deploy::search::backend::server_names:
- "%{::swh_hostname.internal_fqdn}"
- "%{::hostname}"
- 127.0.0.1
- localhost
- "::1"
netbox::version: "3.3.0"
netbox::user: netbox
netbox::db::host: db1.internal.admin.swh.network
netbox::db::port: 5432
netbox::db::database: netbox
netbox::db::username: netbox
# netbox::db::password: in private-data
netbox::mail::host: "%{lookup('smtp::relay_hostname')}"
netbox::mail::from: inventory@softwareheritage.org
netbox::redis::host: localhost
netbox::redis::port: 6379
# netbox::redis::password in private-data
# netbox::redis_cache::password in private-data
# netbox::secret_key in private-data
netbox::admin::email: sysop+netbox@softwareheritage.org
# netbox::admin::password in private-data
# netbox::admin::api_token in private-data
netbox::webhook_enabled: true
netbox::gunicorn::binding: 127.0.0.1
netbox::gunicorn::port: 8001
netbox::data_directory: /var/lib/netbox
netbox::allowed_hosts:
- "*"
- "localhost"
netbox::vhost::letsencrypt_cert: inventory.internal.admin.swh.network
netbox::vhost::name: inventory.internal.admin.swh.network
netbox::backend::url: "http://localhost:%{hiera('netbox::gunicorn::port')}/"
netbox::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
netbox::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
netbox::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
netbox::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
opnsense::hosts:
pushkin:
fqdn: pushkin.internal.softwareheritage.org
ip: 192.168.100.2
glyptotek:
fqdn: glyptotek.internal.softwareheritage.org
ip: 192.168.100.3
opnsense::prometheus::port: 9100
opnsense::prometheus::metrics_path: /metrics
prometheus::elasticsearch::exporter::version: "%{lookup('elastic::elk_version')}.0"
nodejs::version: 14.x
swh::provenance::db::shared_buffers: 32GB
swh::postgresql::version: "11"
swh::postgresql::max_connections: 100
postgresql::globals::version: "%{lookup('swh::postgresql::version')}"
sanoid::templates:
pg_backup:
frequent_period: 0
pre_snapshot_script: /usr/local/bin/start_pg_backup.sh
post_snapshot_script: /usr/local/bin/stop_pg_backup.sh
script_timeout: 60
monthly: 3
daily: 30
hourly: 0
frequently: 0
autoprune: yes
pg_wal_backup:
frequent_period: 0
monthly: 3
daily: 30
hourly: 0
frequently: 0
autoprune: yes
backup: # from the default sanoid configuration
autoprune: yes
frequently: 0
hourly: 30
daily: 30
monthly: 3
yearly: 0
### don't take new snapshots - snapshots on backup
### datasets are replicated in from source, not
### generated locally
autosnap: no
### monitor hourlies and dailies, but don't warn or
### crit until they're over 48h old, since replication
### is typically daily only
hourly_warn: 2880
hourly_crit: 3600
daily_warn: 48
daily_crit: 60
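# Retention summary for the templates above: pg_backup and pg_wal_backup keep
# 30 daily and 3 monthly snapshots and take no hourly or frequent ones; the
# pre/post scripts presumably put PostgreSQL in and out of backup mode around
# each snapshot. On the backup template, the hourly thresholds appear to be
# expressed in minutes (2880 min = 48 h, 3600 min = 60 h) and the daily ones in
# hours, matching the "over 48h old" note above.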
syncoid::configuration: {}
syncoid::default_frequency: 5min
zfs::docker::zpool_configuration:
disk: vdb
# Must have a matching certificate in letsencrypt::certificates
azure_billing::vhost::name: azure-billing.internal.admin.swh.network
azure_billing::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
azure_billing::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
azure_billing::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
azure_billing::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
swh::deploy::maven_index_exporter::url: maven-exporter.internal.softwareheritage.org
maven_index_exporter::vhost::name: "%{lookup('swh::deploy::maven_index_exporter::url')}"
maven_index_exporter::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}"
maven_index_exporter::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}"
maven_index_exporter::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}"
maven_index_exporter::vhost::hsts_header: "%{hiera('apache::hsts_header')}"
maven_index_exporter::image::name: softwareheritage/maven-index-exporter
maven_index_exporter::image::version: v0.3.0
maven_index_exporter::repositories:
maven-central: https://repo1.maven.org/maven2/
clojars: http://clojars.org/repo/
sonatype: http://oss.sonatype.org/content/repositories/releases/
jboss: https://repository.jboss.org/maven2/
atlassian-public: https://maven.atlassian.com/public/
