diff --git a/Puppetfile b/Puppetfile index 2c88109e..48e793b8 100644 --- a/Puppetfile +++ b/Puppetfile @@ -1,196 +1,192 @@ mod 'dar', :git => 'https://forge.softwareheritage.org/source/puppet-swh-dar', :branch => :control_branch, :default_branch => 'master' mod 'gunicorn', :git => 'https://forge.softwareheritage.org/source/puppet-swh-gunicorn', :branch => :control_branch, :default_branch => 'master' mod 'mediawiki', :git => 'https://forge.softwareheritage.org/source/puppet-swh-mediawiki', :branch => :control_branch, :default_branch => 'master' mod 'postfix', :git => 'https://forge.softwareheritage.org/source/puppet-swh-postfix', :branch => :control_branch, :default_branch => 'master' mod 'uwsgi', :git => 'https://forge.softwareheritage.org/source/puppet-swh-uwsgi', :branch => :control_branch, :default_branch => 'master' mod 'apt', :git => 'https://forge.softwareheritage.org/source/puppet-puppetlabs-apt', :tag => 'v7.0.1' mod 'archive', :git => 'https://forge.softwareheritage.org/source/puppet-puppet-archive', :tag => 'v2.3.0' mod 'bind', :git => 'https://forge.softwareheritage.org/source/puppet-inkblot-bind', :ref => '7.3.1' mod 'apache', :git => 'https://forge.softwareheritage.org/source/puppet-puppetlabs-apache', :tag => 'v5.2.0' mod 'ceph', :git => 'https://forge.softwareheritage.org/source/puppet-openstack-ceph', :ref => 'master' mod 'concat', :git => 'https://forge.softwareheritage.org/source/puppet-puppetlabs-concat', :tag => '3.0.0' mod 'cups', :git => 'https://forge.softwareheritage.org/source/puppet-mosen-cups', :ref => 'master' mod 'datacat', :git => 'https://forge.softwareheritage.org/source/puppet-richardc-datacat', :ref => '0.6.2' mod 'debconf', :git => 'https://forge.softwareheritage.org/source/puppet-stm-debconf', :ref => 'v2.1.0' mod 'debnet', :git => 'https://forge.softwareheritage.org/source/puppet-trepasi-debnet', :ref => '8d856df078352a8848a43ca0ee9f2ef9086b343a' mod 'docker', :git => 
'https://forge.softwareheritage.org/source/puppet-puppetlabs-docker', :ref => 'v3.8.0' mod 'elasticsearch', :git => 'https://forge.softwareheritage.org/source/puppet-elastic-elasticsearch', :ref => '6.2.2' mod 'extlib', :git => 'https://forge.softwareheritage.org/source/puppet-puppet-extlib', :tag => 'v2.0.1' mod 'grafana', :git => 'https://forge.softwareheritage.org/source/puppet-puppet-grafana', :tag => 'v6.0.0' mod 'hitch', :git => 'https://forge.softwareheritage.org/source/puppet-ssm-hitch', :ref => 'feature/additional-config-0.1.5' mod 'icinga2', :git => 'https://forge.softwareheritage.org/source/puppet-icinga-icinga2', :tag => 'v2.3.0' mod 'icingaweb2', :git => 'https://forge.softwareheritage.org/source/puppet-icinga-icingaweb2', :tag => 'v2.3.1' mod 'inifile', :git => 'https://forge.softwareheritage.org/source/puppet-puppetlabs-inifile', :ref => '2.2.0' mod 'java', :git => 'https://forge.softwareheritage.org/source/puppet-puppetlabs-java', :tag => 'v5.0.1' mod 'kafka', :git => 'https://forge.softwareheritage.org/source/puppet-puppet-kafka', :ref => 'v5.3.0' mod 'letsencrypt', :git => 'https://forge.softwareheritage.org/source/puppet-puppet-letsencrypt', :ref => 'v4.0.0' mod 'locales', :git => 'https://forge.softwareheritage.org/source/puppet-saz-locales', :ref => 'v2.5.0' -mod 'munin', - :git => 'https://forge.softwareheritage.org/source/puppet-ssm-munin', - :ref => '0.1.0' - mod 'mysql', :git => 'https://forge.softwareheritage.org/source/puppet-puppetlabs-mysql', :ref => '5.3.0' mod 'nginx', :git => 'https://forge.softwareheritage.org/source/puppet-puppet-nginx', :ref => 'v0.11.0' mod 'ntp', :git => 'https://forge.softwareheritage.org/source/puppet-puppetlabs-ntp', :ref => '6.4.1' mod 'php', :git => 'https://forge.softwareheritage.org/source/puppet-puppet-php', :ref => 'v5.3.0' mod 'postgresql', :git => 'https://forge.softwareheritage.org/source/puppet-puppetlabs-postgresql', :ref => 'v6.2.0' mod 'pgbouncer', :git => 
'https://forge.softwareheritage.org/source/puppet-covermymeds-pgbouncer', :ref => '9ec0d8a1255bbb309c2ff38f229167209cad496b' mod 'puppet', :git => 'https://forge.softwareheritage.org/source/puppet-theforeman-puppet', :ref => 'latest_passenger' mod 'puppetdb', :git => 'https://forge.softwareheritage.org/source/puppet-puppetlabs-puppetdb', :ref => '6.0.2' mod 'memcached', :git => 'https://forge.softwareheritage.org/source/puppet-saz-memcached', :ref => 'v3.1.0' mod 'rabbitmq', :git => 'https://forge.softwareheritage.org/source/puppet-puppet-rabbitmq', :ref => 'v9.0.0' mod 'resolv_conf', :git => 'https://forge.softwareheritage.org/source/puppet-saz-resolv_conf', :ref => 'v3.3.0' mod 'ssh', :git => 'https://forge.softwareheritage.org/source/puppet-saz-ssh', :ref => 'v3.0.1' mod 'stdlib', :git => 'https://forge.softwareheritage.org/source/puppet-puppetlabs-stdlib', :ref => '4.25.0' mod 'sudo', :git => 'https://forge.softwareheritage.org/source/puppet-saz-sudo', :ref => 'v5.0.0' mod 'systemd', :git => 'https://forge.softwareheritage.org/source/puppet-camptocamp-systemd', :ref => '1.1.1' mod 'timezone', :git => 'https://forge.softwareheritage.org/source/puppet-saz-timezone', :ref => 'v4.1.1' mod 'unattended_upgrades', :git => 'https://forge.softwareheritage.org/source/puppet-puppet-unattended_upgrades', :ref => 'v3.1.0' mod 'varnish', :git => 'https://forge.softwareheritage.org/source/puppet-claranet-varnish', :ref => 'bugfix/systemd-unit' mod 'vcsrepo', :git => 'https://forge.softwareheritage.org/source/puppet-puppetlabs-vcsrepo', :ref => 'v3.0.0' mod 'zookeeper', :git => 'https://forge.softwareheritage.org/source/puppet-deric-zookeeper', :ref => 'v0.7.7' diff --git a/data/defaults.yaml b/data/defaults.yaml index 501a5ac9..e08acb7d 100644 --- a/data/defaults.yaml +++ b/data/defaults.yaml @@ -1,3037 +1,2999 @@ --- ### See also defaults_security.yaml for public key/cert fingerprint blocks ### dns::local_cache: true dns::nameservers: - 127.0.0.1 dns::search_domains: - 
internal.softwareheritage.org - softwareheritage.org dns::forward_zones: 'internal.softwareheritage.org.': "%{alias('dns::local_nameservers')}" '100.168.192.in-addr.arpa.': "%{alias('dns::local_nameservers')}" '101.168.192.in-addr.arpa.': "%{alias('dns::local_nameservers')}" '200.168.192.in-addr.arpa.': "%{alias('dns::local_nameservers')}" 'internal.staging.swh.network.': "%{alias('dns::local_nameservers')}" '128.168.192.in-addr.arpa.': "%{alias('dns::local_nameservers')}" # dns::forwarders per-location. No Default value # dns::local_nameservers per-location. No Default value # ntp::servers per-location. Default value: ntp::servers: - 0.debian.pool.ntp.org - 1.debian.pool.ntp.org - 2.debian.pool.ntp.org - 3.debian.pool.ntp.org sudo::configs: {} # smtp::relayhost is per-location. Default value: smtp::relayhost: '[pergamon.internal.softwareheritage.org]' smtp::mydestination: - "%{::fqdn}" smtp::mynetworks: - 127.0.0.0/8 - "[::ffff:127.0.0.0]/104" - "[::1]/128" smtp::relay_destinations: [] smtp::virtual_aliases: [] smtp::mail_aliases: - user: anlambert aliases: - antoine.lambert33@gmail.com - user: ardumont aliases: - antoine.romain.dumont@gmail.com - user: ddouard aliases: - david.douard@sdfa3.org - user: olasd aliases: - nicolas+swhinfra@dandrimont.eu - user: morane aliases: - morane.gg@gmail.com - user: postgres aliases: - root - user: rdicosmo aliases: - roberto@dicosmo.org - user: root aliases: - olasd - zack - ardumont - user: seirl aliases: - antoine.pietri1@gmail.com - user: swhstorage aliases: - root - user: swhworker aliases: - zack - olasd - ardumont - user: swhdeposit aliases: - ardumont - user: zack aliases: - zack@upsilon.cc - user: vlorentz aliases: - valentin.lorentz@inria.fr - user: haltode aliases: - haltode@gmail.com locales::default_locale: C.UTF-8 locales::installed_locales: - C.UTF-8 UTF-8 - en_US.UTF-8 UTF-8 - fr_FR.UTF-8 UTF-8 - it_IT.UTF-8 UTF-8 timezone: Etc/UTC packages: - acl - etckeeper - git - htop - ipython3 - molly-guard - moreutils - 
ncdu - nfs-common - python3 - ruby-filesystem - strace - tmux - vim - zsh packages::desktop: - autojump - chromium - curl - emacs - ethtool - gnome - i3 - ii - libx11-dev - mosh - myrepos - net-tools - ruby-dev - rxvt-unicode-256color - screen - scrot - tcpdump - tree - vim-nox - weechat - weechat-scripts packages::devel: - arcanist - elpa-magit - git-email - gitg - gitk - ltrace - perl-doc packages::devel::debian: - devscripts - dpkg-dev - reprepro - sbuild packages::devel::python: - graphviz - make - python3-arrow - python3-azure-storage - python3-blinker - python3-celery - python3-cffi - python3-click - python3-dateutil - python3-dev - python3-dulwich - python3-flake8 - python3-flask - python3-flask-api - python3-flask-limiter - python3-flask-testing - python3-libcloud - python3-msgpack - python3-nose - python3-psycopg2 - python3-pygit2 - python3-requests - python3-retrying - python3-sphinx - python3-subvertpy - python3-vcversioner - python3-venv - python3-wheel packages::devel::broker: - rabbitmq-server packages::devel::postgres: - apgdiff - barman - check-postgres - libpq-dev - postgresql - postgresql-autodoc - postgresql-client - postgresql-contrib - postgresql-doc - postgresql-plpython3-11 users: root: uid: 0 full_name: shell: /bin/bash groups: [] authorized_keys: root@louvre: type: ssh-rsa key: AAAAB3NzaC1yc2EAAAADAQABAAABAQDMLEWHlUQldlvZs5rg0y42lRNAfOhD+6pmO8a73DzpJWHTqvAlfteLpU78IPjSacB4dO5ish1E/1RX/HC+Bt8p2v4RBqbCnVLx2w+Hx4ahWu6qbeTVmTz+U++1SQrHnL08fSlhT0OekCw0lRZM2sQq21FZi6+vul97Ecikag4Xaw6Qfumylu94pM3t05uzTUlKk1+6VMCjhT8dlSe8VS8OirVQpE/OqYtTMAWtQaMXGHPCsqDdYRAKzkJ8GjH7ydZmX5VCRyqS0RvPKAlcJfLCs5HBtv0u5rbeGtiHhuzhj/j3YgS/6NJOC2mUfcetcDOMPLnhkKpnF0vUAzTsJ7aR root@banco: type: ssh-ed25519 key: AAAAC3NzaC1lZDI1NTE5AAAAIDcljv9eR52wJsu9yYan6/riIQw70lQuyz+Qt0XpGXMs zack: uid: 1000 full_name: Stefano Zacchiroli shell: /usr/bin/zsh groups: - adm - swhdev - swhstorage - swhscheduler - swhdeploy - sudo - gitorious - swhteam authorized_keys: zack-software-heritage: 
type: ssh-rsa key: AAAAB3NzaC1yc2EAAAADAQABAAACAQDU0O8tkUqtQCelLEatOGfGpx1sIwHPSMA+7OdXoZjZG5pT9Sfgf3ITsNgo1iYWge5bpH/TKhhvf20B05fa8cCEE5ULaD+xdV9eTIvBEaCiP36HH33WNl/UV8T8klTG2sqBXUgLMJuinfGkuRJ977ndm7mjNwzl3Ghf6JwKfpHrvob4GLc0hm54yzcnNEzQZLcdxmOCWdwTINKnL+W/DDM8NR3vNF6T5+xaiLJzsS0IGcTubklugD3m05qbswS/uACWys3FzRM8tttw/0wCRrC9SCSKoDLonab5y3Ld6vCj1k12J2RAHSqJYwVCm70JRPWZcmU67Udi6kbqkJMftp04K0pplu8V7RLPrpwLyH4sPx7Kkhslvxqj0rerLPOkoDkqneFgxNoMcxN5ayod7fBJAq5jQUmGozeTtgPLKybnxRDhsYpkEH9paZroQ3CqDsA0dptOpedVpcQUSbiLMaYd8kgCPkVIdKANnTGGXDcTfWv21IvFx6sKm1kld2Me3ExVMq7JFcmXutF/IQom9F4vj/xd/7Lt4KmqZKyiAq4n5iaPIRUbZvmwd2D6umOHpMGlqKwtsiWRUYnAVvhRfuSZmgrGgliYiYr+vU2xeWe+XXQhP9vt3eItmdSp/8/+a2lqaIE9slE75hEI2n8in7DeSn6QhFDbyUKwZz5OwK7QVw== olasd: uid: 1001 full_name: Nicolas Dandrimont shell: /bin/bash groups: - adm - swhdev - swhstorage - swhscheduler - swhdeploy - sudo - gitorious - swhteam authorized_keys: nicolasd@darboux: type: ssh-rsa key: AAAAB3NzaC1yc2EAAAADAQABAAABAQDZ1TCpfzrvxLhEMhxjbxqPDCwY0nazIr1cyIbhGD2bUdAbZqVMdNtr7MeDnlLIKrIPJWuvltauvLNkYU0iLc1jMntdBCBM3hgXjmTyDtc8XvXseeBp5tDqccYNR/cnDUuweNcL5tfeu5kzaAg3DFi5Dsncs5hQK5KQ8CPKWcacPjEk4ir9gdFrtKG1rZmg/wi7YbfxrJYWzb171hdV13gSgyXdsG5UAFsNyxsKSztulcLKxvbmDgYbzytr38FK2udRk7WuqPbtEAW1zV4yrBXBSB/uw8EAMi+wwvLTwyUcEl4u0CTlhREljUx8LhYrsQUCrBcmoPAmlnLCD5Q9XrGH ardumont: uid: 1003 full_name: Antoine R. 
Dumont shell: /usr/bin/zsh groups: - adm - swhdev - swhstorage - swhscheduler - swhdeploy - sudo - gitorious - swhteam authorized_keys: eniotna.t@gmail.com: type: ssh-rsa key: AAAAB3NzaC1yc2EAAAADAQABAAABAQDZarzgHrzUYspvrgSI6fszrALo92BDys7QOkJgUfZa9t9m4g7dUANNtwBiqIbqijAQPmB1zKgG6QTZC5rJkRy6KqXCW/+Qeedw/FWIbuI7jOD5WxnglbEQgvPkkB8kf1xIF7icRfWcQmK2je/3sFd9yS4/+jftNMPPXkBCxYm74onMenyllA1akA8FLyujLu6MNA1D8iLLXvz6pBDTT4GZ5/bm3vSE6Go8Xbuyu4SCtYZSHaHC2lXZ6Hhi6dbli4d3OwkUWz+YhFGaEra5Fx45Iig4UCL6kXPkvL/oSc9KGerpT//Xj9qz1K7p/IrBS8+eA4X69bHYYV0UZKDADZSn ardumont@louvre: type: ssh-rsa key: AAAAB3NzaC1yc2EAAAADAQABAAABAQC0Xj8nwGWTb6VGFNIrlhVTLX6VFTlvpirjdgOTOz8riRxBTS9ra35g3cz8zfDl0iVyE455GXzxlm33w/uu3DX0jQOIzkcoEBRw+T33EK89lo6tCCd9xQrteWCTNR1ZBFloHSnYk2m7kw9kyrisziyAdULsCrXmMd3BH1oJyEpISA+sv/dtVpIOWdEQmkbLmdHl2uEdjBLjqb3BtAp2oJZMmppE5YjAx0Aa1+7uSnURf7NnwMx+0wTDMdfqn8z4wqI8eQny+B+bqLH9kY++52FfMVALuErGh5+75/vtd2xzRQamjKsBlTGjFFbMRagZiVNLDX2wtdudhNmnQDIKA+rH swhworker: uid: 1004 full_name: SWH Worker Acccount shell: /bin/bash groups: - swhdeploy - gitorious swhstorage: uid: 1005 full_name: SWH Storage Account shell: /bin/bash groups: - swhdeploy - swhstorage swhwebapp: uid: 1006 full_name: SWH Web App Account shell: /bin/bash groups: [] swhbackup: uid: 1007 full_name: SWH Backup Account shell: /bin/bash groups: [] rdicosmo: uid: 1008 full_name: Roberto Di Cosmo shell: /bin/bash groups: - swhteam authorized_keys: dicosmo@voyager: type: ssh-rsa key: 
AAAAB3NzaC1yc2EAAAADAQABAAACAQC5aS/3Cps2Ru9EW+nIF9Z9o6/xq1thwtCgpIjSPgcrm2BVisj6xbD5OOapS3U6BpLKjWZG8sMGBCsJJ3S1cP0s2I+xHFToqCcbfOxIe/tq/UgTtxGJ0+TfUKNoD+QJjIKnjyC+HVEQm5bSm8mJv0vptj4On8yNopytSGuLcFHHnMB2t+IOkHnTW7n3emhh3SZKAcpI1h7WvPqsqBobMFDMeqvGeHaH2AM2OSoUi7AY+MmcVL0Je6QtJqpz60QI5dvaM4AsobC12AZSJKXnuqQTY6nJy4r9jPRK8RUqo5PuAAsNtlxf5xA4s1LrDR5PxBDpYz47Pq2LHtI9Hgf/SFB3IqZeBKqquMI1xThRBwP307/vOtTiwJr4ZKcpOH+SbU7Tnde4n8siM719QZM8VITtrbwm/VBiEwvhGC/23npX4S55W7Et/l9gmeP3Q+lSw50vBuQhBSn7BzedPM1CqbTN/zqM8TCDUtPVIo+6b2s5ao/Vcq9vBXm5bP0xZeNsqsCl05zpCShudKpT6AlMGAaRTd6NUHHsf4D1JjNx3v42R3vQr6OgHELVMGECuyPs3zWHOS/P6AdD0yJTSOMaklRh2HGN8uj0+aQ7RhnrkYqRfhN+6UkrTANuxdb44AGdLmBAKIYglVrAJe+DEji/LzJdZ22baAWg4ar/WikpFJtxkw== swhteamannex: uid: 1009 full_name: SWH Team Git Annex Account shell: /bin/bash groups: - swhteam authorized_keys: swhteamannex@louvre: type: ssh-rsa key: AAAAB3NzaC1yc2EAAAADAQABAAACAQDL/Ef9kktq/QkJ0lohan8ObQ3o7hMf7EOQPjO+u7UhIMjBNufJnaftQRGIA6N1/wEsDvxxNNz75/oJECJHgZs2OOTJJPsKfYeybmSBocSa/sn6IKK7/b/qlwHJlSGWPGVgbtfP0KexlSAKAmKZuJyqjES5igTLV5w4wTjvUUen9QyefuUehnCX3MJhTpoyixp7icXE80aNMaCPwHZppKb/28sNlPX3MbSONGM45wSFRXNuj0mAAjrgojkhAqFNnI9oKNAt9mDcw1hV0n86VvrDhEbMCJP/z58ecn376BgyXQ8zNUPIr2g0SrSPUNjfxZHfJ0XYpF7624wOMOmZE3fsQzZh+FeMF0IFRPvcG34RaelV9dXiy+/q45oqwbMF464gFSbyt++4jpgqHdsARM4zR//jBhyLvFXR+GaKC3hFENno5W5Raff4XE5rzN/q9jVJBNfvfuEPWrapyM3A/ePeuK3SyNJwyIx+bOEQXsRdxEWKszTeJO2SLPWtCrKrC+G4/HktQSQOj5S9a+N6HoKD8E889eBEYoeZGPIuzMot4cuUlyPt3P99z4oRIaeC6XwUCvZCD2DaTAkQWQMsmOn+soaeZ1zBHbsCBbV0mBMRx7K4Vjs62vhSelryQAXW+cBgd6+f5XBjOnNhHQhsNsDfYP4Kmztn58faQV2TzGG5ow== swhscheduler: uid: 1010 full_name: SWH Scheduler Account shell: /bin/bash groups: - swhscheduler jbertran: uid: 2001 full_name: Jordi Bertran de Balanda shell: /bin/false groups: [] password: "!" qcampos: uid: 2002 full_name: Quentin Campos shell: /bin/false groups: [] password: "!" 
gitorious: uid: 5000 full_name: Gitorious System User shell: /bin/false groups: - gitorious fiendish: uid: 1011 full_name: Avi Kelman shell: /bin/false groups: [] password: "!" morane: uid: 1012 full_name: Morane Otilia Gruenpeter shell: /bin/bash groups: - swhdev - swhstorage - swhteam authorized_keys: morane.gg@gmail.com: type: ssh-rsa key: AAAAB3NzaC1yc2EAAAADAQABAAABAQDm8kH1pP+4ENKmpkTCkL2ashxxnOFVndGrcvfX05lV1hOo2NdItpdoR9txIgFEs3d7v73mtH4nWciUyaK7FIByrtvsR2TIhdVgEcb0Xai8viV+sDMTndpiNlWNilbfxm0K70tgpG4BeSWRJy8cPxnCR9CWoB2Vo9Df7lDKz1LXDgfY4VLJd69ahf1DPFUDjpWIEQdPFX2ZyGUYM+0yPXIoyYW/qreDt1JkYZXXVbRAV8j44/TVgTRYJLgYb9ThW6WzlGM1S4uP7GQdAuROCcspqW3ahV/UmV4Z9SM6S34NN182KvM0Ve7uxAPQz+IdWOgZTK0pvd+hfjHKbLSTA6I3 seirl: uid: 1013 full_name: Antoine Pietri shell: /usr/bin/zsh groups: - swhdev - swhstorage - swhteam - swhdeploy authorized_keys: seirl: type: ssh-ed25519 key: AAAAC3NzaC1lZDI1NTE5AAAAILiua8eEg+nU0XSbYPTgnOMftzvpbN+u7v5jDabeO/0E ssushant: uid: 1014 full_name: Sushant shell: /bin/false groups: [] password: "!" 
anlambert: uid: 1015 full_name: Antoine Lambert shell: /bin/bash groups: - swhdev - swhstorage - swhteam - swhdeploy - swhwebapp authorized_keys: antoine.lambert@inria.fr: type: ssh-rsa key: AAAAB3NzaC1yc2EAAAADAQABAAACAQDLWPcZnSUszEedMa39dT3ZCHpRod3NTs6WT4OfMMRVahrhTtWYdSiNGy8U3kEQveTZvMrb9WLtLPB3K8o7Xrf8WCI8iTOl9eb9DVjE9XL+zS0ZAcEmoZ5YH8e3gEDoDm8ZrMxF+V5XSlvhNi6kbWzJdqhXu++bJHHqGrKUHeTCQCfpYYMrsnvhPjtxe+90BK7e+IGm1Ha8LZMCCmOtz0XggxD8d2mFBaP2p8v9xsM48KfwFvsRMb3TZIaO/+NcsRSTe7wfFAR1pb14pi5LZAHeb2tpWfVH2vQGaE7Rej+Ycf4UOeaRmFGpimw7u7fugvDvKfZ/vs7w7Qs2RtxNdqJf9JM+vvi78OQbloufot1Tz2r19aDbhM9nsCn+Uo3rNfkmD+UcSMKrRJCMEXVBbaY/bgzs7XoqCJ8ODE2U/dF3NtHBZr+CB52iilUtemXy+Xwqw4TSs/r9vW7/XueTdb0Yp/cUs5uLCqCwlMpGS5okorpdJextp5gRuN6EMlUo6PffRiz5T0CqKm1xJu0NeT0EaacAXoGTDQaS4pIQGglqWfAOmjej9dM8gxAF6rgrx70uJt6Hy18tvzdB5iwJ4F2LUjcZhFnrxjUDzhjPoDBiRtPNgEKrCc30OHsveqXwMPo3v/d3np1Vpkum0JEwmp83q92P5T2rbf+wiruxZhhtww== grouss: uid: 1016 full_name: Guillaume Rousseau shell: /bin/bash groups: - swhteam authorized_keys: guillaume.rousseau@univ-paris-diderot.fr: type: ssh-rsa key: AAAAB3NzaC1yc2EAAAADAQABAAABAQC7Akcdxrod/MFcHg53dCf7iZY/ph9MR0tWU08pjMDfU04j1nAgmHmvumYbxBtFNnd0nu4A9YY4nT79273PCE3c6ba3zSGS9DBYhrASGDqHBECrgEREM3YPXpA2NI0FKEZ878Ic3CQlYaOmRoe/QkFpm2j8CMoG4VdKp0EcvV1RCTgWqJY1P4KC30CJUg+OdGRaaqHEoSskjstU5yjbZCC9M90Hz0xO+MsMl/xKdcDmvwbLDMtp/3SKDQeyN4Q7Uu/zZwoZ8FmgEU4Xp7nKN3yCiEB9rqMkP/lLY71hTPHn/GiZnPo4rWL13w3unuI3X0GDpqxPxjt0LZN4xQEGEn+1 ftigeot: uid: 1017 full_name: Francois Tigeot shell: /bin/false password: "!" 
groups: [] swhdeposit: uid: 1018 full_name: SWH Deposit App Account shell: /bin/bash groups: - swhscheduler swhvault: uid: 1019 full_name: SWH Vault Account shell: /bin/bash groups: - swhdeploy - swhstorage - swhvault ddouard: uid: 1020 full_name: David Douard shell: /bin/bash groups: - swhdev - swhteam - swhscheduler authorized_keys: david.douard@sdfa3.org: type: ssh-rsa key: AAAAB3NzaC1yc2EAAAADAQABAAACAQCoON7De2Bx03owpZfzbOyucZTmyQdm7F+LP4D4H9EyOFxtyMpjH2S9Ve/JvMoFIWGQQlXSkYzRv63Z0BzPLKD2NsYgomcjOLdw1Baxnv8VOH+Q01g4B3cabcP2LMVjerHt/KRkY3E6dnKLQGE5UiER/taQ7KazAwvu89nUd4BJsV43rJ3X3DtFEfH3lR4ZEIgFyPUkVemQAjBhueFmN3w8debOdr7t9cBpnYvYKzLQN+G/kQVFc+fgs+fFOtOv+Az9kTXChfLs5pKPBm+MuGxz4gS3fPiAjY9cN6vGzr7ZNkCRUSUjJ10Hlm7Gf2EN8f+k6iSR4CPeixDcZ+scbCg4dCORqTsliSQzUORIJED9fbUR6bBjF4rRwm5GvnXx5ZTToWDJu0PSHYOkomqffp30wqvAvs6gLb+bG1daYsOLp+wYru3q09J9zUAA8vNXoWYaERFxgwsmsf57t8+JevUuePJGUC45asHjQh/ON1H5PDXtULmeD1GKkjqyaS7SBNbpOWgQb21l3pwhLet3Mq3TJmxVqzGMDnYvQMUCkiPdZq2pDplzfpDpOKLaDg8q82rR5+/tAfB4P2Z9RCOqnMLRcQk9AluTyO1D472Mkp+v5VA4di0eTWZ0tuzwYJEft0OVo+QOVTslCGsyGiEUoOcHzkrdgsT5uQziyAfgTMSuiw== vlorentz: uid: 1021 full_name: Valentin Lorentz shell: /usr/bin/zsh groups: - swhdev - swhteam authorized_keys: valentin.lorentz@inria.fr: type: ssh-ed25519 key: AAAAC3NzaC1lZDI1NTE5AAAAILsRMQjrrfUjX1ka9e6YlyMyDvTC+qk5a21Fp9yXYI7p vlorentz@softwareheritage.org: type: ssh-ed25519 key: AAAAC3NzaC1lZDI1NTE5AAAAIIjJoY4XBTTNsxLVF/sUKBI4WGR2AIiR9qfMdspnsRfJ haltode: uid: 1022 full_name: Thibault Allancon shell: /usr/bin/zsh groups: - swhdev - swhteam authorized_keys: haltode@gmail.com: type: ssh-ed25519 key: AAAAC3NzaC1lZDI1NTE5AAAAIORGwY56PpvgwMWqDei718PPriV6U7LL5JMPJWS7zTcg groups: adm: gid: 4 # assigned from base-files zack: gid: 1000 olasd: gid: 1001 ardumont: gid: 1003 ddouard: gid: 1020 swhworker: gid: 1004 swhdev: gid: 1002 swhstorage: gid: 1005 swhdeploy: gid: 1006 swhbackup: gid: 1007 swhwebapp: gid: 1008 swhteam: gid: 1009 swhscheduler: gid: 1010 sudo: gid: 27 # assigned from 
base-files gitorious: gid: 5000 swhdeposit: gid: 1018 swhvault: gid: 1019 gunicorn::statsd::host: 127.0.0.1:8125 -munin::node::allow: - - 192.168.100.20 - -munin::node::network: "%{lookup('internal_network')}" - -munin::node::plugins::enable: - - apt - - postfix_mailvolume - - postfix_mailqueue - -munin::node::plugins::disable: - - apt_all - - df_inode - - entropy - - exim_mailstats - - exim_mailqueue - - interrupts - - irqstats - - netstat - - nfs4_client - - nfsd4 - - open_files - - open_inodes - - proc_pri - - vmstat - munin::master::hostname: munin.internal.softwareheritage.org -munin::plugins::rabbitmq::messages_warn: 18000000 -munin::plugins::rabbitmq::messages_crit: 20000000 -munin::plugins::rabbitmq::queue_memory_warn: 1073741824 # 1GB -munin::plugins::rabbitmq::queue_memory_crit: 2147483648 # 2GB - rabbitmq::monitoring::user: swhdev # following password key in private data # - rabbitmq::monitoring::password # - swh::deploy::worker::task_broker::password # - swh::deploy::scheduler::task_broker::password rabbitmq::server::users: - name: "%{hiera('rabbitmq::monitoring::user')}" is_admin: true password: "%{hiera('rabbitmq::monitoring::password')}" tags: [] - name: swhconsumer is_admin: false password: "%{hiera('swh::deploy::worker::task_broker::password')}" tags: [] - name: swhproducer is_admin: false password: "%{hiera('swh::deploy::scheduler::task_broker::password')}" tags: - management puppet::master::hostname: pergamon.internal.softwareheritage.org puppet::master::puppetdb: pergamon.internal.softwareheritage.org puppetdb::master::config::terminus_package: puppet-terminus-puppetdb strict_transport_security::max_age: 15768000 php::version: '7.3' # Those variables get picked up by 'include ::php::fpm::daemon' php::fpm::daemon::log_owner: www-data php::fpm::daemon::log_group: adm php::fpm::daemon::log_dir_mode: '0750' # Those variables get picked up by 'include ::apache' apache::server_tokens: 'Prod' apache::server_signature: 'Off' apache::trace_enable: 'Off' 
# Those variables get picked up by 'include ::apache::mod::passenger' apache::mod::passenger::passenger_root: /usr/lib/ruby/vendor_ruby/phusion_passenger/locations.ini # Those variables need to be set manually in the SSL vhosts. apache::ssl_protocol: all -SSLv2 -SSLv3 -TLSv1 -TLSv1.1 apache::ssl_honorcipherorder: 'On' apache::ssl_cipher: ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384 apache::hsts_header: "set Strict-Transport-Security \"max-age=%{hiera('strict_transport_security::max_age')}\"" # Those variables need to be set manually for all vhosts apache::http_port: 80 apache::https_port: 443 # Hitch TLS proxy configuration hitch::frontend: "[*]:10443" hitch::proxy_support: false hitch::http2_support: false # Varnish configuration varnish::http_port: 10080 varnish::proxy_port: 6081 varnish::http2_support: false varnish::listen: - ":%{hiera('varnish::http_port')}" - "[::1]:%{hiera('varnish::proxy_port')},PROXY" varnish::backend_http_port: "%{hiera('apache::http_port')}" varnish::admin_listen: 127.0.0.1 varnish::admin_port: 6082 varnish::storage_type: malloc varnish::storage_size: 256m varnish::storage_file: /var/lib/varnish/varnish_storage.bin # varnish::secret in private-data letsencrypt::account_email: sysop+letsencrypt@softwareheritage.org letsencrypt::server: https://acme-v02.api.letsencrypt.org/directory letsencrypt::gandi_livedns_hook::config: gandi_api: https://dns.api.gandi.net/api/v5/ zones: softwareheritage.org: api_key: "%{alias('gandi::softwareheritage_org::api_key')}" sharing_id: "%{alias('gandi::softwareheritage_org::sharing_id')}" letsencrypt::gandi_paas_hook::config: gandi_xmlrpc: https://rpc.gandi.net/xmlrpc/ zone_keys: softwareheritage.org: "%{alias('gandi::softwareheritage_org::xmlrpc_key')}" letsencrypt::certificates::exported_directory: 
"%{::puppet_vardir}/letsencrypt_exports" letsencrypt::certificates::directory: /etc/ssl/certs/letsencrypt letsencrypt::certificates: stats_export: domains: - stats.export.softwareheritage.org - pergamon.softwareheritage.org jenkins: domains: - jenkins.softwareheritage.org sentry: domains: - sentry.softwareheritage.org www-dev: domains: - www-dev.softwareheritage.org deploy_hook: gandi_paas www: domains: - softwareheritage.org - www.softwareheritage.org deploy_hook: gandi_paas gandi-redirects: domains: - softwareheritage.org - sponsors.softwareheritage.org - sponsorship.softwareheritage.org - testimonials.softwareheritage.org deploy_hook: gandi_paas bind::update_key: local-update bind::zones: internal.softwareheritage.org: domain: internal.softwareheritage.org 100.168.192.in-addr.arpa: domain: 100.168.192.in-addr.arpa 101.168.192.in-addr.arpa: domain: 101.168.192.in-addr.arpa internal.staging.swh.network: domain: internal.staging.swh.network 128.168.192.in-addr.arpa: domain: 128.168.192.in-addr.arpa 200.168.192.in-addr.arpa: domain: 200.168.192.in-addr.arpa 201.168.192.in-addr.arpa: domain: 201.168.192.in-addr.arpa 202.168.192.in-addr.arpa: domain: 202.168.192.in-addr.arpa 203.168.192.in-addr.arpa: domain: 203.168.192.in-addr.arpa 204.168.192.in-addr.arpa: domain: 204.168.192.in-addr.arpa 205.168.192.in-addr.arpa: domain: 205.168.192.in-addr.arpa 206.168.192.in-addr.arpa: domain: 206.168.192.in-addr.arpa 207.168.192.in-addr.arpa: domain: 207.168.192.in-addr.arpa # Defaults for secondary bind server bind::zones::type: slave bind::zones::masters: - 192.168.100.29 bind::zones::allow_transfers: - 192.168.100.0/24 - 192.168.101.0/24 - 192.168.200.22 bind::zones::default_data: zone_type: "%{alias('bind::zones::type')}" dynamic: true masters: "%{alias('bind::zones::masters')}" transfer_source: '' allow_updates: [] update_policies: '' allow_transfers: "%{alias('bind::zones::allow_transfers')}" dnssec: false key_directory: '' ns_notify: true also_notify: '' allow_notify: '' 
forwarders: '' forward: '' source: '' ns_records: - pergamon.internal.softwareheritage.org. - ns0.euwest.azure.internal.softwareheritage.org. bind::resource_records: archive/CNAME: type: CNAME record: archive.internal.softwareheritage.org data: moma.internal.softwareheritage.org. db/CNAME: type: CNAME record: db.internal.softwareheritage.org data: belvedere.internal.softwareheritage.org. debian/CNAME: type: CNAME record: debian.internal.softwareheritage.org data: pergamon.internal.softwareheritage.org. backup/CNAME: type: CNAME record: backup.internal.softwareheritage.org data: banco.internal.softwareheritage.org. banco/A: record: banco.internal.softwareheritage.org data: 192.168.100.18 beaubourg/A: record: beaubourg.internal.softwareheritage.org data: 192.168.100.32 icinga/CNAME: type: CNAME record: icinga.internal.softwareheritage.org data: pergamon.internal.softwareheritage.org. faitout/CNAME: type: CNAME record: faitout.internal.softwareheritage.org data: prado.internal.softwareheritage.org. graph/CNAME: type: CNAME record: graph.internal.softwareheritage.org data: granet.internal.softwareheritage.org. logstash/CNAME: type: CNAME record: logstash.internal.softwareheritage.org data: logstash0.internal.softwareheritage.org. logstash0/A: record: logstash0.internal.softwareheritage.org data: 192.168.100.19 - munin/CNAME: - type: CNAME - record: munin.internal.softwareheritage.org - data: munin0.internal.softwareheritage.org. - munin0/A: - record: munin0.internal.softwareheritage.org - data: 192.168.100.20 kibana/CNAME: type: CNAME record: kibana.internal.softwareheritage.org data: banco.internal.softwareheritage.org. kibana0/A: record: kibana0.internal.softwareheritage.org data: 192.168.100.50 rabbitmq/CNAME: type: CNAME record: rabbitmq.internal.softwareheritage.org data: saatchi.internal.softwareheritage.org. 
esnode1/A: record: esnode1.internal.softwareheritage.org data: 192.168.100.61 esnode2/A: record: esnode2.internal.softwareheritage.org data: 192.168.100.62 esnode3/A: record: esnode3.internal.softwareheritage.org data: 192.168.100.63 # VPN hosts zack/A: record: zack.internal.softwareheritage.org data: 192.168.101.6 olasd/A: record: olasd.internal.softwareheritage.org data: 192.168.101.10 ardumont/A: record: ardumont.internal.softwareheritage.org data: 192.168.101.14 ardumont-desktop/A: record: ardumont-desktop.internal.softwareheritage.org data: 192.168.101.158 rdicosmo/A: record: rdicosmo.internal.softwareheritage.org data: 192.168.101.38 petitpalais/A: record: petitpalais.internal.softwareheritage.org data: 192.168.101.154 grand-palais/A: record: grand-palais.internal.softwareheritage.org data: 192.168.101.62 grandpalais/CNAME: type: CNAME record: grandpalais.internal.softwareheritage.org data: grand-palais.internal.softwareheritage.org. giverny/A: type: A record: giverny.internal.softwareheritage.org data: 192.168.101.118 orangeriedev/A: type: A record: orangeriedev.internal.softwareheritage.org data: 192.168.101.130 orangerie/A: type: A record: orangerie.internal.softwareheritage.org data: 192.168.101.142 ddouard-desktop/A: record: ddouard-desktop.internal.softwareheritage.org data: 192.168.101.162 vlorentz-desktop/A: record: vlorentz-desktop.internal.softwareheritage.org data: 192.168.101.166 gateway/A: record: gateway.internal.staging.swh.network data: 192.168.128.1 storage0/A: record: storage0.internal.staging.swh.network data: 192.168.128.2 db0/A: record: db0.internal.staging.swh.network data: 192.168.128.3 scheduler0/A: record: scheduler0.internal.staging.swh.network data: 192.168.128.4 journal0/A: record: journal0.internal.staging.swh.network data: 192.168.128.10 worker0/A: record: worker0.internal.staging.swh.network data: 192.168.128.5 worker1/A: record: worker1.internal.staging.swh.network data: 192.168.128.6 worker2/A: record: 
worker2.internal.staging.swh.network data: 192.168.128.11 deposit/A: record: deposit.internal.staging.swh.network data: 192.168.128.7 webapp/A: record: webapp.internal.staging.swh.network data: 192.168.128.8 vault/A: record: vault.internal.staging.swh.network data: 192.168.128.9 bind::resource_records::default_data: type: A bind::clients: - 192.168.100.0/24 - 192.168.101.0/24 - 192.168.200.0/21 - 127.0.0.0/8 - '::1/128' bind::autogenerate: - 192.168.100.0/24 - 192.168.200.0/21 dar::backup::enable: true dar::backup::storage: /srv/backups dar::backup::num_backups: 1 dar::backup::base: / dar::backup::select: [] # empty list = full backup dar::backup::exclude: - dev - proc - run - srv/backups - srv/db-backups - srv/elasticsearch - srv/remote-backups - srv/softwareheritage/objects - srv/softwareheritage/postgres - srv/softwareheritage/scratch - srv/softwareheritage/scratch.2TB - srv/storage - sys - tmp - var/cache - var/lib/mysql - var/log/journal - var/run - var/tmp dar::backup::options: - -zbzip2 dar::cron: hour: 0 minute: fqdn_rand dar_server::backup::storage: /srv/remote-backups dar_server::central_host: uffizi.softwareheritage.org dar_server::cron: hour: '0-4' minute: '*/10' phabricator::basepath: /srv/phabricator phabricator::user: phabricator phabricator::vcs_user: git phabricator::notification::client_host: 127.0.0.1 phabricator::notification::client_port: 22280 phabricator::notification::listen: "%{hiera('phabricator::notification::client_host')}:%{hiera('phabricator::notification::client_port')}" phabricator::mysql::database_prefix: phabricator phabricator::mysql::username: phabricator phabricator::mysql::conf::max_allowed_packet: 33554432 phabricator::mysql::conf::sql_mode: STRICT_ALL_TABLES phabricator::mysql::conf::ft_stopword_file: "%{hiera('phabricator::basepath')}/phabricator/resources/sql/stopwords.txt" phabricator::mysql::conf::ft_min_word_len: 3 phabricator::mysql::conf::ft_boolean_syntax: "' |-><()~*:\"\"&^'" 
phabricator::mysql::conf::innodb_buffer_pool_size: 4G phabricator::mysql::conf::innodb_file_per_table: TRUE phabricator::mysql::conf::innodb_flush_method: O_DIRECT phabricator::mysql::conf::innodb_log_file_size: 1G phabricator::mysql::conf::max_connections: 16384 phabricator::php::fpm_listen: 127.0.0.1:9001 phabricator::php::max_file_size: 128M phabricator::php::opcache_validate_timestamps: 0 phabricator::vhost::name: forge.softwareheritage.org phabricator::vhost::docroot: "%{hiera('phabricator::basepath')}/phabricator/webroot" phabricator::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}" phabricator::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" phabricator::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" phabricator::vhost::hsts_header: "%{hiera('apache::hsts_header')}" mediawiki::php::fpm_listen: 127.0.0.1:9002 mediawiki::vhosts: intranet.softwareheritage.org: swh_logo: /images/9/99/Swh-intranet-logo.png mysql: username: mw_intranet dbname: mediawiki_intranet aliases: [] site_name: Software Heritage Intranet wiki.softwareheritage.org: swh_logo: /images/b/b2/Swh-logo.png mysql: username: mw_public dbname: mediawiki_public aliases: [] site_name: Software Heritage Wiki mediawiki::vhost::docroot: /var/lib/mediawiki mediawiki::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}" mediawiki::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" mediawiki::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" mediawiki::vhost::hsts_header: "%{hiera('apache::hsts_header')}" annex::basepath: /srv/softwareheritage/annex annex::vhost::name: annex.softwareheritage.org annex::vhost::docroot: "%{hiera('annex::basepath')}/webroot" annex::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}" annex::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" annex::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" annex::vhost::hsts_header: "%{hiera('apache::hsts_header')}" docs::basepath: 
/srv/softwareheritage/docs docs::vhost::name: docs.softwareheritage.org docs::vhost::docroot: "%{hiera('docs::basepath')}/webroot" docs::vhost::docroot_owner: "jenkins-push-docs" docs::vhost::docroot_group: "www-data" docs::vhost::docroot_mode: "2755" docs::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}" docs::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" docs::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" docs::vhost::hsts_header: "%{hiera('apache::hsts_header')}" ssh::port: 22 ssh::permitrootlogin: without-password swh::base_directory: /srv/softwareheritage swh::conf_directory: /etc/softwareheritage swh::log_directory: /var/log/softwareheritage swh::global_conf::file: "%{hiera('swh::conf_directory')}/global.ini" swh::global_conf::contents: | # Managed by puppet (class profile::swh) - modifications will be overwritten [main] log_db = swh::apt_config::swh_repository::hostname: debian.softwareheritage.org swh::apt_config::swh_repository: "https://%{hiera('swh::apt_config::swh_repository::hostname')}/" swh::apt_config::enable_non_free: false swh::apt_config::backported_packages: stretch: # For swh.scheduler - python3-msgpack # T1609 - python3-urllib3 - python3-requests - python3-chardet - python3-idna debian_repository::basepath: "%{hiera('swh::base_directory')}/repository" debian_repository::owner: swhdebianrepo debian_repository::owner::homedir: /home/swhdebianrepo debian_repository::group: swhdev debian_repository::mode: "02775" debian_repository::ssh_authorized_keys: nicolasd@darboux: type: ssh-rsa key: AAAAB3NzaC1yc2EAAAADAQABAAABAQDZ1TCpfzrvxLhEMhxjbxqPDCwY0nazIr1cyIbhGD2bUdAbZqVMdNtr7MeDnlLIKrIPJWuvltauvLNkYU0iLc1jMntdBCBM3hgXjmTyDtc8XvXseeBp5tDqccYNR/cnDUuweNcL5tfeu5kzaAg3DFi5Dsncs5hQK5KQ8CPKWcacPjEk4ir9gdFrtKG1rZmg/wi7YbfxrJYWzb171hdV13gSgyXdsG5UAFsNyxsKSztulcLKxvbmDgYbzytr38FK2udRk7WuqPbtEAW1zV4yrBXBSB/uw8EAMi+wwvLTwyUcEl4u0CTlhREljUx8LhYrsQUCrBcmoPAmlnLCD5Q9XrGH jenkins@thyssen: type: ssh-rsa key: 
AAAAB3NzaC1yc2EAAAADAQABAAABAQCrfYnl8v4QK1ClkPMHO4WiPqgLVoOGpOPFUvg3WehMo8xMQ9e/EeZddQn96mhHkbbC5HCWEVK1VwafpIeadaMHnypdGhpapncYPpoKItxmf1IwVtlt/h8OYai5pTMCgkuOHjhnQdO20Amr9WMkoRZ/K7v/GijIZ6svvgWiYKfDnu0s1ziFYIT5rEA5hL9SqNJTlKdy2H68/7mmTii9NpBsGWQYDOjcrwELNOI5EUgQSOzmeKxecPkABfh/dezp6jmrv/2x7bm7LT46d+rnVDqVRiUrLVnLhrZCmZDxXfbEmftTdAoK8U/wjLreanRxKOc7arYRyKu0RbAaejPejzgR debian_repository::gpg_keys: # olasd - 791F12396630DD71FD364375B8E5087766475AAF # zack - 4900707DDC5C07F2DECB02839C31503C6D866396 # ardumont - BF00203D741AC9D546A8BE0752E2E9840D10C3B8 # anlambert - 91FAF3F5CDE011E4FDF4CBF2D026E5C2F802586D # seirl - 225CD9E3FA9374BDF6E057042F8984858B1A9945 # vlorentz - 379043E3DF96D3237E6782AC0E082B40E4376B1E # ddouard - 7DC7325EF1A6226AB6C3D7E32388A3BF6F0A6938 # jenkins-debian1 - 1F4BDC445E30C7066324D7B3D7D3329147AE3148 debian_repository::vhost::name: "%{hiera('swh::apt_config::swh_repository::hostname')}" debian_repository::vhost::aliases: - debian.internal.softwareheritage.org debian_repository::vhost::docroot: "%{hiera('debian_repository::basepath')}" debian_repository::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}" debian_repository::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" debian_repository::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" debian_repository::vhost::hsts_header: "%{hiera('apache::hsts_header')}" swh::apt_config::debian_mirror::hostname: deb.debian.org swh::apt_config::debian_mirror: "http://%{hiera('swh::apt_config::debian_mirror::hostname')}/debian/" swh::apt_config::debian_security_mirror::hostname: "%{hiera('swh::apt_config::debian_mirror::hostname')}" swh::apt_config::debian_security_mirror: "http://%{hiera('swh::apt_config::debian_mirror::hostname')}/debian-security/" swh::apt_config::azure_repository::hostname: debian-archive.trafficmanager.net swh::apt_config::azure_repository: "http://%{hiera('swh::apt_config::azure_repository::hostname')}/debian-azure/" swh::apt_config::unattended_upgrades: 
true swh::apt_config::unattended_upgrades::origins: - "o=Debian,n=%{::lsbdistcodename}" # main Debian archive - "o=Debian,n=%{::lsbdistcodename}-updates" # stable-updates (ex-volatile) - "o=Debian,n=%{::lsbdistcodename},l=Debian-Security" # security updates - "o=debian icinga-%{::lsbdistcodename},n=icinga-%{::lsbdistcodename}" # Icinga2 repository - "o=Debian Azure,n=%{::lsbdistcodename}" # Debian Azure - "o=Proxmox,n=%{::lsbdistcodename}" # Proxmox repository - "o=packages.sury.org" # PHP backports (tate) ##################################################################################################### # Remote service configurations # Default ports swh::remote_service::storage::port: 5002 swh::remote_service::objstorage::port: 5003 swh::remote_service::webapp::port: 5004 swh::remote_service::vault::port: 5005 swh::remote_service::deposit::port: 5006 swh::remote_service::indexer::port: 5007 swh::remote_service::scheduler::port: 5008 # Default backend services. Override in specific sites if needed. Configurations # are split between read-only (the default) and writable storages. In most cases # overrides should only happen for read-only services. 
swh::remote_service::objstorage::config: "%{alias('swh::remote_service::objstorage::config::azure_readonly_with_fallback')}" swh::remote_service::objstorage::config::writable: "%{alias('swh::remote_service::objstorage::config::uffizi')}" swh::remote_service::objstorage::config_as_dict: banco: "%{alias('swh::remote_service::objstorage::config::banco')}" uffizi: "%{alias('swh::remote_service::objstorage::config::uffizi')}" azure: "%{alias('swh::remote_service::objstorage::config::azure')}" swh::remote_service::storage::config: "%{alias('swh::remote_service::storage::config::uffizi')}" swh::remote_service::storage::config::writable: &swh_remote_service_storage_config_writable "%{alias('swh::remote_service::storage::config::uffizi')}" swh::remote_service::indexer::config: "%{alias('swh::remote_service::indexer::config::uffizi')}" swh::remote_service::indexer::config::writable: "%{alias('swh::remote_service::indexer::config::uffizi')}" swh::remote_service::scheduler::config: "%{alias('swh::remote_service::scheduler::config::saatchi')}" swh::remote_service::scheduler::config::writable: "%{alias('swh::remote_service::scheduler::config::saatchi')}" swh::remote_service::vault::config: "%{alias('swh::remote_service::vault::config::azure')}" swh::remote_service::vault::config::writable: "%{alias('swh::remote_service::vault::config::azure')}" # Objstorage backend configurations swh::remote_service::objstorage::config::azure: &swh_objstorage_config_azure cls: azure-prefixed args: accounts: "0": account_name: 0euwestswh api_secret_key: "%{hiera('swh::azure::credentials::0euwestswh')}" container_name: contents "1": account_name: 1euwestswh api_secret_key: "%{hiera('swh::azure::credentials::1euwestswh')}" container_name: contents "2": account_name: 2euwestswh api_secret_key: "%{hiera('swh::azure::credentials::2euwestswh')}" container_name: contents "3": account_name: 3euwestswh api_secret_key: "%{hiera('swh::azure::credentials::3euwestswh')}" container_name: contents "4": 
account_name: 4euwestswh api_secret_key: "%{hiera('swh::azure::credentials::4euwestswh')}" container_name: contents "5": account_name: 5euwestswh api_secret_key: "%{hiera('swh::azure::credentials::5euwestswh')}" container_name: contents "6": account_name: 6euwestswh api_secret_key: "%{hiera('swh::azure::credentials::6euwestswh')}" container_name: contents "7": account_name: 7euwestswh api_secret_key: "%{hiera('swh::azure::credentials::7euwestswh')}" container_name: contents "8": account_name: 8euwestswh api_secret_key: "%{hiera('swh::azure::credentials::8euwestswh')}" container_name: contents "9": account_name: 9euwestswh api_secret_key: "%{hiera('swh::azure::credentials::9euwestswh')}" container_name: contents "a": account_name: aeuwestswh api_secret_key: "%{hiera('swh::azure::credentials::aeuwestswh')}" container_name: contents "b": account_name: beuwestswh api_secret_key: "%{hiera('swh::azure::credentials::beuwestswh')}" container_name: contents "c": account_name: ceuwestswh api_secret_key: "%{hiera('swh::azure::credentials::ceuwestswh')}" container_name: contents "d": account_name: deuwestswh api_secret_key: "%{hiera('swh::azure::credentials::deuwestswh')}" container_name: contents "e": account_name: eeuwestswh api_secret_key: "%{hiera('swh::azure::credentials::eeuwestswh')}" container_name: contents "f": account_name: feuwestswh api_secret_key: "%{hiera('swh::azure::credentials::feuwestswh')}" container_name: contents swh::remote_service::objstorage::config::azure::readonly: cls: filtered args: storage_conf: "%{alias('swh::remote_service::objstorage::config::azure')}" filters_conf: - type: readonly swh::remote_service::objstorage::config::uffizi: &swh_objstorage_config_uffizi cls: remote args: url: "http://uffizi.internal.softwareheritage.org:%{hiera('swh::remote_service::objstorage::port')}/" swh::remote_service::objstorage::config::uffizi::readonly: cls: filtered args: storage_conf: "%{alias('swh::remote_service::objstorage::config::uffizi')}" filters_conf: 
- type: readonly swh::remote_service::objstorage::config::banco: &swh_objstorage_config_banco cls: remote args: url: "http://banco.internal.softwareheritage.org:%{hiera('swh::remote_service::objstorage::port')}/" swh::remote_service::objstorage::config::banco::readonly: cls: filtered args: storage_conf: "%{alias('swh::remote_service::objstorage::config::banco')}" filters_conf: - type: readonly swh::remote_service::objstorage::config::azure_readonly_with_fallback: &swh_azure_readonly_with_fallback cls: multiplexer args: objstorages: - "%{alias('swh::remote_service::objstorage::config::azure::readonly')}" - "%{alias('swh::remote_service::objstorage::config::banco::readonly')}" - "%{alias('swh::remote_service::objstorage::config::uffizi::readonly')}" swh::remote_service::objstorage::config::localhost: cls: remote args: url: "http://127.0.0.1:%{hiera('swh::remote_service::objstorage::port')}/" # Storage backend configurations swh::remote_service::storage::config::uffizi: cls: remote args: url: "http://uffizi.internal.softwareheritage.org:%{hiera('swh::remote_service::storage::port')}/" swh::remote_service::storage::config::azure: cls: remote args: url: "http://storage0.euwest.azure.internal.softwareheritage.org:%{hiera('swh::remote_service::storage::port')}/" swh::remote_service::storage::config::localhost: cls: remote args: url: "http://localhost:%{hiera('swh::remote_service::storage::port')}/" # Indexer backend configurations swh::remote_service::indexer::config::uffizi: cls: remote args: url: "http://uffizi.internal.softwareheritage.org:%{hiera('swh::remote_service::indexer::port')}/" swh::remote_service::indexer::config::azure: cls: remote args: url: "http://storage0.euwest.azure.internal.softwareheritage.org:%{hiera('swh::remote_service::indexer::port')}/" # Scheduler backend configurations swh::remote_service::scheduler::config::saatchi: cls: remote args: url: "http://saatchi.internal.softwareheritage.org:%{hiera('swh::remote_service::scheduler::port')}/" # Vault 
backend configurations swh::remote_service::vault::config::azure: cls: remote args: url: "http://vangogh.euwest.azure.internal.softwareheritage.org:%{hiera('swh::remote_service::vault::port')}/" # End remote service configurations ##################################################################################################### swh::deploy::db::pgbouncer::port: 5432 swh::deploy::db::main::port: 5433 swh::deploy::db::secondary::port: 5434 swh::deploy::db::hdd::port: 5435 swh::deploy::db::pgbouncer::user::login: postgres pgbouncer::config_params: logfile: /var/log/postgresql/pgbouncer.log pidfile: /var/run/postgresql/pgbouncer.pid unix_socket_dir: /var/run/postgresql client_tls_sslmode: allow client_tls_ca_file: /etc/ssl/certs/ssl-cert-snakeoil.pem client_tls_key_file: /etc/ssl/private/ssl-cert-snakeoil.key client_tls_cert_file: /etc/ssl/certs/ssl-cert-snakeoil.pem server_tls_sslmode: allow listen_port: "%{hiera('swh::deploy::db::pgbouncer::port')}" listen_addr: - 127.0.0.1 - 127.0.1.1 - "%{hiera('pgbouncer::listen_addr')}" auth_type: "hba" auth_file: /etc/pgbouncer/userlist.txt auth_hba_file: "%{hiera('pgbouncer::auth_hba_file')}" admin_users: - "%{hiera('swh::deploy::db::pgbouncer::user::login')}" - olasd pool_mode: session server_reset_query: DISCARD ALL max_client_conn: 2000 default_pool_size: 2000 max_db_connections: 2000 max_user_connections: 2000 log_connections: 0 log_disconnections: 0 pgbouncer::user: postgres pgbouncer::group: postgres # swh::deploy::db::pgbouncer::user::password in private data pgbouncer::userlist: - user: "%{hiera('swh::deploy::db::pgbouncer::user::login')}" password: "%{hiera('swh::deploy::db::pgbouncer::user::password')}" pgbouncer::databases: [] swh::deploy::directory: "%{hiera('swh::conf_directory')}/deploy" swh::deploy::group: swhdeploy swh::deploy::public_key: 'ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQDWrJX/uUss/EYZaTp2EIsZgg3ZSH8JcNZV5gBdNZ7EHcQcqxYUCqmwv9Ss3xT8n9kIrH6iz/vquqf84XR+keoZK3bsp50tMOY8LJWpcl/JK2XD6ovoJrHPu+iAroLkE59RdTa1Vz+jF67Q2UuG9f0nKwL4rnkeWTyuK/zAbyHyYKFQntkkwMr5/YTU8sjl/4aNF/2Ww8hitdi2GORlCjav2bB0wyPBA2e8sMt8Hp9O4TIWg/RD6vPX+ZvuFaB/Lw/Hv21622QGTHoZiO92/8/W9/t24il6SU4z96ZGfXqdUZkpPYKBGwyIkZkS4dN6jb4CcRlyXTObphyu3dAlABRt swhworker@worker01' swh::deploy::storage::conf_directory: "%{hiera('swh::conf_directory')}/storage" swh::deploy::storage::conf_file: "%{hiera('swh::deploy::storage::conf_directory')}/storage.yml" swh::deploy::storage::user: swhstorage swh::deploy::storage::group: swhstorage swh::deploy::storage::db::host: db.internal.softwareheritage.org swh::deploy::storage::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}" swh::deploy::storage::db::user: swhstorage swh::deploy::storage::db::dbname: softwareheritage swh::deploy::storage::directory: "%{hiera('swh::base_directory')}/objects" swh::deploy::storage::backend::listen::host: 127.0.0.1 swh::deploy::storage::backend::listen::port: "%{alias('swh::remote_service::storage::port')}" swh::deploy::storage::backend::workers: 4 swh::deploy::storage::backend::reload_mercy: 3600 swh::deploy::storage::backend::http_keepalive: 5 swh::deploy::storage::backend::http_timeout: 3600 swh::deploy::storage::backend::max_requests: 10000 swh::deploy::storage::backend::max_requests_jitter: 1000 swh::deploy::storage::backend::server_names: - "%{::swh_hostname.internal_fqdn}" - "%{::hostname}" - 127.0.0.1 - localhost - "::1" swh::deploy::storage::config: storage: cls: local args: db: "host=%{hiera('swh::deploy::storage::db::host')} port=%{hiera('swh::deploy::storage::db::port')} user=%{hiera('swh::deploy::storage::db::user')} dbname=%{hiera('swh::deploy::storage::db::dbname')} password=%{hiera('swh::deploy::storage::db::password')}" objstorage: "%{alias('swh::remote_service::objstorage::config')}" swh::deploy::indexer::storage::conf_file: 
"%{hiera('swh::deploy::storage::conf_directory')}/indexer.yml" swh::deploy::indexer::storage::user: swhstorage swh::deploy::indexer::storage::group: swhstorage swh::deploy::indexer::storage::db::host: somerset.internal.softwareheritage.org swh::deploy::indexer::storage::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}" swh::deploy::indexer::storage::db::user: swhstorage swh::deploy::indexer::storage::db::dbname: softwareheritage-indexer swh::deploy::indexer::storage::backend::listen::host: 127.0.0.1 swh::deploy::indexer::storage::backend::listen::port: "%{alias('swh::remote_service::indexer::port')}" swh::deploy::indexer::storage::backend::workers: 4 swh::deploy::indexer::storage::backend::reload_mercy: 3600 swh::deploy::indexer::storage::backend::http_keepalive: 5 swh::deploy::indexer::storage::backend::http_timeout: 3600 swh::deploy::indexer::storage::backend::max_requests: 10000 swh::deploy::indexer::storage::backend::max_requests_jitter: 1000 swh::deploy::indexer::storage::backend::server_names: - "%{::swh_hostname.internal_fqdn}" - "%{::hostname}" - 127.0.0.1 - localhost - "::1" swh::deploy::indexer::storage::config: indexer_storage: cls: local args: db: "host=%{hiera('swh::deploy::indexer::storage::db::host')} port=%{hiera('swh::deploy::indexer::storage::db::port')} user=%{hiera('swh::deploy::indexer::storage::db::user')} dbname=%{hiera('swh::deploy::indexer::storage::db::dbname')} password=%{hiera('swh::deploy::indexer::storage::db::password')}" swh::deploy::vault::cache: "%{hiera('swh::base_directory')}/vault_cache" # Default cache (orangerie/orangeriedev) is a pathslicing objstorage swh::deploy::vault::config::cache: cls: pathslicing args: root: "%{hiera('swh::deploy::vault::cache')}" slicing: "0:1/1:5" swh::deploy::vault::conf_directory: "%{hiera('swh::conf_directory')}/vault" swh::deploy::vault::conf_file: "%{hiera('swh::deploy::vault::conf_directory')}/server.yml" swh::deploy::vault::user: swhvault swh::deploy::vault::group: swhvault 
swh::deploy::vault::db::host: db.internal.softwareheritage.org swh::deploy::vault::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}" swh::deploy::vault::db::user: swh-vault swh::deploy::vault::db::dbname: swh-vault swh::deploy::vault::backend::listen::host: 127.0.0.1 swh::deploy::vault::backend::listen::port: "%{alias('swh::remote_service::vault::port')}" swh::deploy::vault::backend::workers: 4 swh::deploy::vault::backend::reload_mercy: 3600 swh::deploy::vault::backend::http_keepalive: 5 swh::deploy::vault::backend::http_timeout: 3600 swh::deploy::vault::backend::max_requests: 10000 swh::deploy::vault::backend::max_requests_jitter: 1000 swh::deploy::vault::backend::server_names: - "%{::swh_hostname.internal_fqdn}" - "%{::hostname}" - 127.0.0.1 - localhost - "::1" swh::deploy::vault::config: storage: "%{alias('swh::remote_service::storage::config')}" scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}" cache: "%{alias('swh::deploy::vault::config::cache')}" vault: cls: local args: db: "host=%{hiera('swh::deploy::vault::db::host')} port=%{hiera('swh::deploy::vault::db::port')} user=%{hiera('swh::deploy::vault::db::user')} dbname=%{hiera('swh::deploy::vault::db::dbname')} password=%{hiera('swh::deploy::vault::db::password')}" swh::deploy::journal::conf_directory: "%{hiera('swh::conf_directory')}/journal" swh::deploy::journal::brokers: - esnode1.internal.softwareheritage.org - esnode2.internal.softwareheritage.org - esnode3.internal.softwareheritage.org swh::deploy::journal::prefix: swh.journal.objects swh::deploy::journal_simple_checker_producer::conf_file: "%{hiera('swh::deploy::journal::conf_directory')}/checker.yml" swh::deploy::journal_simple_checker_producer::user: swhstorage swh::deploy::journal_simple_checker_producer::group: swhstorage swh::deploy::journal_simple_checker_producer::config: brokers: "%{alias('swh::deploy::journal::brokers')}" temporary_prefix: swh.tmp_journal.new storage_dbconn: 
"host=%{hiera('swh::deploy::storage::db::host')} port=%{hiera('swh::deploy::storage::db::port')} user=%{hiera('swh::deploy::storage::db::user')} dbname=%{hiera('swh::deploy::storage::db::dbname')} password=%{hiera('swh::deploy::storage::db::password')}" object_types: - content - directory - revision - release - origin - origin_visit swh::deploy::objstorage::conf_directory: "%{hiera('swh::conf_directory')}/objstorage" swh::deploy::objstorage::conf_file: "%{hiera('swh::deploy::objstorage::conf_directory')}/server.yml" swh::deploy::objstorage::user: "%{hiera('swh::deploy::storage::user')}" swh::deploy::objstorage::group: "%{hiera('swh::deploy::storage::group')}" swh::deploy::objstorage::directory: "%{hiera('swh::deploy::storage::directory')}" swh::deploy::objstorage::slicing: 0:2/2:4/4:6 swh::deploy::objstorage::config: objstorage: cls: pathslicing args: root: "%{hiera('swh::deploy::objstorage::directory')}" slicing: "%{hiera('swh::deploy::objstorage::slicing')}" client_max_size: 1073741824 # 1 GiB swh::deploy::objstorage::backend::listen::host: 127.0.0.1 swh::deploy::objstorage::backend::listen::port: "%{alias('swh::remote_service::objstorage::port')}" swh::deploy::objstorage::backend::workers: 4 swh::deploy::objstorage::backend::reload_mercy: 3600 swh::deploy::objstorage::backend::http_workers: 1 swh::deploy::objstorage::backend::http_keepalive: 5 swh::deploy::objstorage::backend::http_timeout: 3600 swh::deploy::objstorage::backend::max_requests: 0 swh::deploy::objstorage::backend::max_requests_jitter: 0 swh::deploy::objstorage::backend::server_names: - "%{::swh_hostname.internal_fqdn}" - "%{::hostname}" - 127.0.0.1 - localhost - "::1" swh::deploy::deposit::vhost::name: deposit.softwareheritage.org swh::deploy::deposit::url: https://deposit.softwareheritage.org swh::deploy::deposit::vhost::aliases: [] swh::deploy::deposit::vhost::docroot: "/var/www/%{hiera('swh::deploy::deposit::vhost::name')}" swh::deploy::deposit::vhost::ssl_protocol: 
"%{hiera('apache::ssl_protocol')}" swh::deploy::deposit::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" swh::deploy::deposit::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" swh::deploy::deposit::locked_endpoints: - /1/private/[^/]+/[^/]+/[^/]+ - /1/private/deposits/ swh::deploy::deposit::config_directory: "%{hiera('swh::conf_directory')}/deposit" swh::deploy::deposit::config_file: "%{hiera('swh::deploy::deposit::config_directory')}/server.yml" swh::deploy::deposit::user: swhdeposit swh::deploy::deposit::group: swhdeposit swh::deploy::deposit::media_root_directory: /srv/storage/space/swh-deposit/uploads/ swh::deploy::deposit::db::host: db.internal.softwareheritage.org swh::deploy::deposit::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}" swh::deploy::deposit::db::dbname: softwareheritage-deposit swh::deploy::deposit::db::dbuser: swhstorage swh::deploy::deposit::config::allowed_hosts: - deposit.internal.softwareheritage.org # swh::deploy::deposit::db::password: in private data # swh::deploy::deposit::runtime_secret_key in private data swh::deploy::deposit::config: max_upload_size: 209715200 tool: name: 'swh-deposit' version: '0.0.1' configuration: sword_version: 2 scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}" allowed_hosts: "%{alias('swh::deploy::deposit::config::allowed_hosts')}" private: secret_key: "%{hiera('swh::deploy::deposit::runtime_secret_key')}" db: host: "%{hiera('swh::deploy::deposit::db::host')}" port: "%{hiera('swh::deploy::deposit::db::port')}" name: "%{hiera('swh::deploy::deposit::db::dbname')}" user: "%{hiera('swh::deploy::deposit::db::dbuser')}" password: "%{hiera('swh::deploy::deposit::db::password')}" media_root: "%{hiera('swh::deploy::deposit::media_root_directory')}" loader-version: 2 swh::deploy::worker::loader_deposit::config_file: "%{hiera('swh::conf_directory')}/loader_deposit.yml" swh::deploy::worker::loader_deposit::concurrency: 1 
swh::deploy::worker::loader_deposit::private_tmp: true swh::deploy::worker::loader_deposit::loglevel: info # deposit_basic_auth_swhworker_{username|password} in private_data swh::deploy::worker::loader_deposit::config: storage: cls: filter args: storage: cls: buffer args: storage: "%{alias('swh::remote_service::storage::config::writable')}" min_batch_size: content: 10000 content_bytes: 104857600 directory: 1000 revision: 1000 extraction_dir: /tmp/swh.loader.deposit/ celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.loader.package.deposit.tasks task_queues: - swh.loader.package.deposit.tasks.LoadDeposit deposit: url: "%{alias('swh::deploy::webapp::deposit::private::url')}" auth: username: "%{hiera('deposit_basic_auth_swhworker_username')}" password: "%{hiera('deposit_basic_auth_swhworker_password')}" swh::deploy::worker::checker_deposit::config_file: "%{hiera('swh::conf_directory')}/checker_deposit.yml" swh::deploy::worker::checker_deposit::concurrency: 1 swh::deploy::worker::checker_deposit::private_tmp: true swh::deploy::worker::checker_deposit::loglevel: info # deposit_basic_auth_swhworker_{username|password} in private_data swh::deploy::worker::checker_deposit::config: storage: "%{alias('swh::remote_service::storage::config::writable')}" extraction_dir: /tmp/swh.checker.deposit/ celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.deposit.loader.tasks task_queues: - swh.deposit.loader.tasks.ChecksDepositTsk url: "%{alias('swh::deploy::deposit::url')}" auth: username: "%{hiera('deposit_basic_auth_swhworker_username')}" password: "%{hiera('deposit_basic_auth_swhworker_password')}" swh::deploy::deposit::backend::listen::host: 127.0.0.1 swh::deploy::deposit::backend::listen::port: "%{alias('swh::remote_service::deposit::port')}" swh::deploy::deposit::backend::workers: 8 swh::deploy::deposit::backend::reload_mercy: 3600 swh::deploy::deposit::backend::http_keepalive: 5 
swh::deploy::deposit::backend::http_timeout: 3600 swh::deploy::objstorage_log_checker::conf_directory: "%{hiera('swh::deploy::objstorage::conf_directory')}" swh::deploy::objstorage_log_checker::conf_file: "%{hiera('swh::deploy::objstorage_log_checker::conf_directory')}/log_checker.yml" swh::deploy::objstorage_log_checker::user: "%{hiera('swh::deploy::objstorage::user')}" swh::deploy::objstorage_log_checker::group: "%{hiera('swh::deploy::objstorage::group')}" swh::deploy::objstorage_log_checker::config: storage: cls: pathslicing args: root: "%{hiera('swh::deploy::objstorage::directory')}" slicing: "%{hiera('swh::deploy::objstorage::slicing')}" batch_size: 1000 log_tag: objstorage.checker.log swh::deploy::objstorage_repair_checker::conf_directory: "%{hiera('swh::deploy::objstorage::conf_directory')}" swh::deploy::objstorage_repair_checker::conf_file: "%{hiera('swh::deploy::objstorage_repair_checker::conf_directory')}/repair_checker.yml" swh::deploy::objstorage_repair_checker::user: "%{hiera('swh::deploy::objstorage::user')}" swh::deploy::objstorage_repair_checker::group: "%{hiera('swh::deploy::objstorage::group')}" swh::deploy::objstorage_repair_checker::config: storage: cls: pathslicing args: root: "%{hiera('swh::deploy::objstorage::directory')}" slicing: "%{hiera('swh::deploy::objstorage::slicing')}" batch_size: 1000 log_tag: objstorage.checker.repair backup_storages: "%{alias('swh::remote_service::objstorage::config_as_dict')}" swh::deploy::webapp::backported_packages: stretch: - python3-django - python-django-common swh::deploy::deposit::backported_packages: "%{alias('swh::deploy::webapp::backported_packages')}" swh::deploy::webapp::conf_directory: "%{hiera('swh::conf_directory')}/web" swh::deploy::webapp::conf_file: "%{hiera('swh::deploy::webapp::conf_directory')}/web.yml" swh::deploy::webapp::user: swhwebapp swh::deploy::webapp::group: swhwebapp swh::deploy::webapp::conf::log_dir: "%{hiera('swh::log_directory')}/webapp" 
swh::deploy::webapp::backend::listen::host: 127.0.0.1 swh::deploy::webapp::backend::listen::port: "%{alias('swh::remote_service::webapp::port')}" swh::deploy::webapp::backend::workers: 32 swh::deploy::webapp::backend::http_keepalive: 5 swh::deploy::webapp::backend::http_timeout: 3600 swh::deploy::webapp::backend::reload_mercy: 3600 swh::deploy::webapp::vhost::docroot: "/var/www/%{hiera('swh::deploy::webapp::vhost::name')}" swh::deploy::webapp::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}" swh::deploy::webapp::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" swh::deploy::webapp::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" swh::deploy::webapp::vhost::hsts_header: "%{hiera('apache::hsts_header')}" swh::deploy::webapp::config::allowed_hosts: - archive.softwareheritage.org - base.softwareheritage.org - archive.internal.softwareheritage.org swh::deploy::webapp::config::es_workers_index_url: http://esnode1.internal.softwareheritage.org:9200/swh_workers-* swh::deploy::webapp::production_db_dir: /var/lib/swh swh::deploy::webapp::production_db: "%{hiera('swh::deploy::webapp::production_db_dir')}/web.sqlite3" swh::deploy::webapp::deposit::private::url: "%{hiera('swh::deploy::deposit::url')}/1/private/" swh::deploy::webapp::config::throttling: cache_uri: "%{hiera('memcached::server::bind')}:%{hiera('memcached::server::port')}" scopes: swh_api: limiter_rate: default: 120/h exempted_networks: - 127.0.0.0/8 - 192.168.100.0/23 - 128.93.193.29 - 131.107.174.0/24 # OpenAIRE - 213.135.60.145 - 213.135.60.146 # DINSIC - 37.187.137.47 swh_api_origin_search: limiter_rate: default: 10/m swh_api_origin_visit_latest: # This endpoint gets called a lot (by default, up to 70 times # per origin search), so it deserves a much higher rate-limit # than the rest of the API. 
limiter_rate: default: 700/m swh_vault_cooking: limiter_rate: default: 120/h GET: 60/m exempted_networks: - 127.0.0.0/8 - 192.168.100.0/23 - 128.93.193.29 - 131.107.174.0/24 # OpenAIRE - 213.135.60.145 - 213.135.60.146 swh_save_origin: limiter_rate: default: 120/h POST: 10/h exempted_networks: - 127.0.0.0/8 - 192.168.100.0/23 - 128.93.193.29 - 131.107.174.0/24 # OpenAIRE - 213.135.60.145 - 213.135.60.146 # in private data: # deposit_basic_auth_swhworker_username # deposit_basic_auth_swhworker_password swh::deploy::webapp::config: storage: "%{alias('swh::remote_service::storage::config')}" vault: "%{alias('swh::remote_service::vault::config::writable')}" indexer_storage: "%{alias('swh::remote_service::indexer::config')}" scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}" log_dir: "%{hiera('swh::deploy::webapp::conf::log_dir')}" secret_key: "%{hiera('swh::deploy::webapp::conf::secret_key')}" content_display_max_size: 1048576 throttling: "%{alias('swh::deploy::webapp::config::throttling')}" allowed_hosts: "%{alias('swh::deploy::webapp::config::allowed_hosts')}" production_db: "%{hiera('swh::deploy::webapp::production_db')}" es_workers_index_url: "%{alias('swh::deploy::webapp::config::es_workers_index_url')}" deposit: private_api_url: "%{hiera('swh::deploy::webapp::deposit::private::url')}" private_api_user: "%{hiera('deposit_basic_auth_swhworker_username')}" private_api_password: "%{hiera('deposit_basic_auth_swhworker_password')}" swh::deploy::webapp::locked_endpoints: - /api/1/content/[^/]+/symbol/ - /api/1/entity/ - /api/1/provenance/ # local configuration for the scheduler swh::deploy::scheduler::config::local: &swh_scheduler_local_config scheduler: cls: local args: db: "host=%{hiera('swh::deploy::scheduler::db::host')} port=%{hiera('swh::deploy::scheduler::db::port')} dbname=%{hiera('swh::deploy::scheduler::db::dbname')} user=%{hiera('swh::deploy::scheduler::db::user')} password=%{hiera('swh::deploy::scheduler::db::password')}" 
swh::deploy::scheduler::conf_file: "%{hiera('swh::conf_directory')}/scheduler.yml" swh::deploy::scheduler::user: swhscheduler swh::deploy::scheduler::group: swhscheduler swh::deploy::scheduler::db::host: db.internal.softwareheritage.org swh::deploy::scheduler::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}" swh::deploy::scheduler::db::dbname: softwareheritage-scheduler swh::deploy::scheduler::db::user: swhscheduler # swh::deploy::scheduler::db::password in private data # swh::deploy::scheduler::task_broker::password in private data swh::deploy::scheduler::task_broker: "amqp://swhproducer:%{hiera('swh::deploy::scheduler::task_broker::password')}@rabbitmq:5672//" swh::deploy::scheduler::listener::log_level: INFO swh::deploy::scheduler::runner::log_level: INFO swh::deploy::scheduler::config: <<: *swh_scheduler_local_config celery: task_broker: "%{alias('swh::deploy::scheduler::task_broker')}" swh::deploy::scheduler::task_packages: - python3-swh.lister - python3-swh.loader.debian - python3-swh.loader.dir - python3-swh.loader.git - python3-swh.loader.mercurial - python3-swh.loader.pypi - python3-swh.loader.svn - python3-swh.loader.tar - python3-swh.loader.npm - python3-swh.deposit.loader - python3-swh.indexer - python3-swh.vault swh::deploy::scheduler::backported_packages: jessie: - python3-sqlalchemy swh::deploy::scheduler::task_modules: - swh.lister.bitbucket.tasks - swh.lister.cgit.tasks - swh.lister.debian.tasks - swh.lister.github.tasks - swh.lister.gitlab.tasks - swh.lister.npm.tasks - swh.lister.pypi.tasks - swh.loader.debian.tasks - swh.loader.dir.tasks - swh.loader.git.tasks - swh.loader.mercurial.tasks - swh.loader.pypi.tasks - swh.loader.svn.tasks - swh.loader.tar.tasks - swh.deposit.loader.tasks - swh.indexer.tasks - swh.vault.cooking_tasks swh::deploy::scheduler::remote::conf_dir: "%{hiera('swh::conf_directory')}/backend" swh::deploy::scheduler::remote::conf_file: "%{hiera('swh::deploy::scheduler::remote::conf_dir')}/scheduler.yml" 
swh::deploy::scheduler::remote::user: swhscheduler swh::deploy::scheduler::remote::group: swhscheduler swh::deploy::scheduler::remote::backend::listen::host: 127.0.0.1 swh::deploy::scheduler::remote::backend::listen::port: "%{alias('swh::remote_service::scheduler::port')}" swh::deploy::scheduler::remote::backend::workers: 16 swh::deploy::scheduler::remote::backend::reload_mercy: 3600 swh::deploy::scheduler::remote::backend::http_keepalive: 5 swh::deploy::scheduler::remote::backend::http_timeout: 3600 swh::deploy::scheduler::remote::backend::max_requests: 10000 swh::deploy::scheduler::remote::backend::max_requests_jitter: 1000 swh::deploy::scheduler::remote::backend::server_names: - "%{::swh_hostname.internal_fqdn}" - "%{::hostname}" - 127.0.0.1 - localhost - "::1" swh::deploy::scheduler::remote::config: "%{alias('swh::deploy::scheduler::config::local')}" swh::deploy::scheduler::archive::conf_dir: "%{hiera('swh::conf_directory')}/backend" swh::deploy::scheduler::archive::conf_file: "%{hiera('swh::deploy::scheduler::archive::conf_dir')}/elastic.yml" swh::deploy::scheduler::archive::user: "%{hiera('swh::deploy::scheduler::user')}" swh::deploy::scheduler::archive::config: scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}" storage_nodes: - host: esnode2.internal.softwareheritage.org port: 9200 - host: esnode3.internal.softwareheritage.org port: 9200 - host: esnode1.internal.softwareheritage.org port: 9200 client_options: sniff_on_start: false sniff_on_connection_fail: True http_compress: false # Main lister configuration swh::deploy::worker::lister::db::user: swh-lister swh::deploy::worker::lister::db::name: swh-lister swh::deploy::worker::lister::db::host: db.internal.softwareheritage.org swh::deploy::worker::lister::db::port: "%{alias('swh::deploy::db::pgbouncer::port')}" # swh::deploy::lister::db::password in private data # swh::deploy::worker::task_broker::password in private data swh::deploy::worker::task_broker: 
"amqp://swhconsumer:%{hiera('swh::deploy::worker::task_broker::password')}@rabbitmq:5672//" swh::deploy::worker::instances: - loader_debian - loader_git - lister swh::deploy::worker::loader_git::config_file: "%{hiera('swh::conf_directory')}/loader_git.yml" swh::deploy::worker::loader_git::concurrency: 1 swh::deploy::worker::loader_git::loglevel: info swh::deploy::worker::loader_git::config: storage: "%{alias('swh::remote_service::storage::config::writable')}" save_data: false save_data_path: /srv/storage/space/data/sharded_packfiles directory_packet_size: 100 celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.loader.git.tasks task_queues: - swh.loader.git.tasks.UpdateGitRepository - swh_loader_git # loader-git-disk - swh.loader.git.tasks.LoadDiskGitRepository - swh.loader.git.tasks.UncompressAndLoadDiskGitRepository swh::deploy::worker::loader_debian::config_file: "%{hiera('swh::conf_directory')}/loader_debian.yml" swh::deploy::worker::loader_debian::private_tmp: true swh::deploy::worker::loader_debian::concurrency: 1 swh::deploy::worker::loader_debian::loglevel: info swh::deploy::worker::loader_debian::config: storage: cls: filter args: storage: cls: buffer args: storage: "%{alias('swh::remote_service::storage::config::writable')}" min_batch_size: content: 10000 content_bytes: 104857600 directory: 1000 revision: 1000 celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.loader.package.debian.tasks task_queues: - swh.loader.package.debian.tasks.LoadDebian swh::deploy::worker::loader_archive::config_file: "%{hiera('swh::conf_directory')}/loader_archive.yml" swh::deploy::worker::loader_archive::private_tmp: true swh::deploy::worker::loader_archive::concurrency: 1 swh::deploy::worker::loader_archive::loglevel: info swh::deploy::worker::loader_archive::config: storage: cls: filter args: storage: cls: buffer args: storage: "%{alias('swh::remote_service::storage::config::writable')}" min_batch_size: 
content: 10000 content_bytes: 104857600 directory: 1000 revision: 1000 celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.loader.package.archive.tasks task_queues: - swh.loader.package.archive.tasks.LoadArchive swh::deploy::worker::lister::config_file: "%{hiera('swh::conf_directory')}/lister.yml" swh::deploy::worker::lister::concurrency: 5 swh::deploy::worker::lister::loglevel: warning swh::deploy::worker::lister::config: storage: "%{alias('swh::remote_service::storage::config::writable')}" scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}" lister: cls: local args: db: "postgresql://%{hiera('swh::deploy::worker::lister::db::user')}:%{hiera('swh::deploy::lister::db::password')}@%{hiera('swh::deploy::worker::lister::db::host')}:%{hiera('swh::deploy::worker::lister::db::port')}/%{hiera('swh::deploy::worker::lister::db::name')}" celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.lister.bitbucket.tasks - swh.lister.cgit.tasks - swh.lister.debian.tasks - swh.lister.github.tasks - swh.lister.gitlab.tasks - swh.lister.gnu.tasks - swh.lister.npm.tasks - swh.lister.phabricator.tasks - swh.lister.pypi.tasks task_queues: - swh.lister.bitbucket.tasks.IncrementalBitBucketLister - swh.lister.bitbucket.tasks.FullBitBucketRelister - swh.lister.cgit.tasks.CGitListerTask - swh.lister.debian.tasks.DebianListerTask - swh.lister.github.tasks.IncrementalGitHubLister - swh.lister.github.tasks.RangeGitHubLister - swh.lister.github.tasks.FullGitHubRelister - swh.lister.gitlab.tasks.IncrementalGitLabLister - swh.lister.gitlab.tasks.RangeGitLabLister - swh.lister.gitlab.tasks.FullGitLabRelister - swh.lister.gnu.tasks.GNUListerTask - swh.lister.npm.tasks.NpmListerTask - swh.lister.phabricator.tasks.FullPhabricatorLister - swh.lister.pypi.tasks.PyPIListerTask credentials: "%{alias('swh::deploy::worker::lister::config::credentials')}" swh::deploy::worker::loader_mercurial::config_file: 
"%{hiera('swh::conf_directory')}/loader_mercurial.yml" swh::deploy::worker::loader_mercurial::concurrency: 1 swh::deploy::worker::loader_mercurial::private_tmp: true swh::deploy::worker::loader_mercurial::loglevel: info swh::deploy::worker::loader_mercurial::config: storage: "%{alias('swh::remote_service::storage::config::writable')}" reduce_effort: False clone_timeout_seconds: 7200 celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.loader.mercurial.tasks task_queues: - swh.loader.mercurial.tasks.LoadMercurial - swh.loader.mercurial.tasks.LoadArchiveMercurial - swh_loader_mercurial - swh_loader_mercurial_archive swh::deploy::worker::loader_pypi::config_file: "%{hiera('swh::conf_directory')}/loader_pypi.yml" swh::deploy::worker::loader_pypi::concurrency: 1 swh::deploy::worker::loader_pypi::private_tmp: true swh::deploy::worker::loader_pypi::loglevel: info swh::deploy::worker::loader_pypi::config: storage: cls: filter args: storage: cls: buffer args: storage: "%{alias('swh::remote_service::storage::config::writable')}" min_batch_size: content: 10000 content_bytes: 104857600 directory: 1000 revision: 1000 celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.loader.package.pypi.tasks task_queues: - swh.loader.package.pypi.tasks.LoadPyPI swh::deploy::worker::loader_npm::config_file: "%{hiera('swh::conf_directory')}/loader_npm.yml" swh::deploy::worker::loader_npm::concurrency: 1 swh::deploy::worker::loader_npm::private_tmp: true swh::deploy::worker::loader_npm::loglevel: info swh::deploy::worker::loader_npm::config: storage: cls: filter args: storage: cls: buffer args: storage: "%{alias('swh::remote_service::storage::config::writable')}" min_batch_size: content: 10000 content_bytes: 104857600 directory: 1000 revision: 1000 celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.loader.package.npm.tasks task_queues: - swh.loader.package.npm.tasks.LoadNpm 
swh::deploy::worker::loader_svn::config_file: "%{hiera('swh::conf_directory')}/loader_svn.yml" swh::deploy::worker::loader_svn::concurrency: 1 swh::deploy::worker::loader_svn::private_tmp: true swh::deploy::worker::loader_svn::limit_no_file: 8192 swh::deploy::worker::loader_svn::loglevel: info # Contains a password: in private data swh::deploy::worker::loader_svn::config: storage: "%{alias('swh::remote_service::storage::config::writable')}" celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.loader.svn.tasks task_queues: - swh.loader.svn.tasks.LoadSvnRepository - swh.loader.svn.tasks.MountAndLoadSvnRepository - swh.loader.svn.tasks.DumpMountAndLoadSvnRepository - swh_loader_svn - swh_loader_svn_mount_and_load swh::deploy::base_indexer::config_directory: "%{hiera('swh::conf_directory')}/indexer" swh::deploy::indexer_journal_client::config_file: "journal_client.yml" swh::deploy::indexer_journal_client::user: swhstorage swh::deploy::indexer_journal_client::group: swhstorage swh::deploy::indexer_journal_client::config: journal: brokers: "%{alias('swh::deploy::journal::brokers')}" group_id: swh.indexer.journal_client scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}" swh::deploy::worker::indexer_content_mimetype::config_file: "%{hiera('swh::conf_directory')}/indexer_content_mimetype.yml" swh::deploy::worker::indexer_content_mimetype::concurrency: 1 swh::deploy::worker::indexer_content_mimetype::loglevel: info # Contains a password: in private data swh::deploy::worker::indexer_content_mimetype::config: scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}" indexer_storage: "%{alias('swh::remote_service::indexer::config::writable')}" objstorage: "%{alias('swh::remote_service::objstorage::config')}" storage: "%{alias('swh::remote_service::storage::config')}" celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.indexer.tasks task_queues: - 
swh.indexer.tasks.ContentMimetype - swh.indexer.tasks.ContentRangeMimetype - swh_indexer_content_mimetype_range tools: name: file version: 1:5.30-1+deb9u1 configuration: type: library debian-package: python3-magic write_batch_size: 1000 swh::deploy::worker::indexer_origin_intrinsic_metadata::config_file: "%{hiera('swh::conf_directory')}/indexer_origin_intrinsic_metadata.yml" swh::deploy::worker::indexer_origin_intrinsic_metadata::concurrency: 1 swh::deploy::worker::indexer_origin_intrinsic_metadata::loglevel: info # Contains a password: in private data swh::deploy::worker::indexer_origin_intrinsic_metadata::config: scheduler: "%{alias('swh::remote_service::scheduler::config::writable')}" indexer_storage: "%{alias('swh::remote_service::indexer::config::writable')}" objstorage: "%{alias('swh::remote_service::objstorage::config')}" storage: "%{alias('swh::remote_service::storage::config')}" celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.indexer.tasks task_queues: - swh.indexer.tasks.OriginMetadata tools: name: swh-metadata-detector version: 0.0.2 configuration: {} swh::deploy::worker::indexer_rehash::config_file: "rehash.yml" swh::deploy::worker::indexer_rehash::concurrency: 5 swh::deploy::worker::indexer_rehash::loglevel: info # Contains a password: in private data swh::deploy::worker::indexer_rehash::config: storage: "%{alias('swh::remote_service::storage::config::writable')}" objstorage: "%{alias('swh::remote_service::objstorage::config')}" compute_checksums: - blake2s256 batch_size_retrieve_content: 10000 batch_size_update: 5000 swh::deploy::worker::indexer_fossology_license::config_file: "%{hiera('swh::conf_directory')}/indexer_fossology_license.yml" swh::deploy::worker::indexer_fossology_license::concurrency: 1 swh::deploy::worker::indexer_fossology_license::loglevel: info # Contains a password: in private data swh::deploy::worker::indexer_fossology_license::config: indexer_storage: 
"%{alias('swh::remote_service::indexer::config::writable')}" objstorage: "%{alias('swh::remote_service::objstorage::config')}" storage: "%{alias('swh::remote_service::storage::config')}" workdir: /tmp/swh/indexer.fossology.license/ tools: name: 'nomos' version: '3.1.0rc2-31-ga2cbb8c' configuration: command_line: "nomossa " celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.indexer.tasks task_queues: - swh.indexer.tasks.ContentFossologyLicense - swh.indexer.tasks.ContentRangeFossologyLicense - swh_indexer_content_fossology_license_range write_batch_size: 1000 swh::deploy::worker::indexer_content_ctags::config_file: "%{hiera('swh::conf_directory')}/indexer_content_ctags.yml" swh::deploy::worker::indexer_content_ctags::concurrency: 2 swh::deploy::worker::indexer_content_ctags::loglevel: info # Contains a password: in private data # objstorage configuration from swh::azure_objstorage::config is merged in the manifest swh::deploy::worker::indexer_content_ctags::config: indexer_storage: "%{alias('swh::remote_service::indexer::config::writable')}" objstorage: "%{alias('swh::remote_service::objstorage::config')}" workdir: /tmp/swh/indexer.ctags/ tools: name: 'universal-ctags' version: '0+git20181215-2' configuration: command_line: "ctags --fields=+lnz --sort=no --links=no --output-format=json " celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.indexer.tasks task_queues: - swh.indexer.tasks.Ctags languages: abap: '' abnf: '' actionscript: '' actionscript-3: '' ada: Ada adl: '' agda: '' alloy: '' ambienttalk: '' antlr: '' antlr-with-actionscript-target: '' antlr-with-c#-target: '' antlr-with-cpp-target: '' antlr-with-java-target: '' antlr-with-objectivec-target: '' antlr-with-perl-target: '' antlr-with-python-target: '' antlr-with-ruby-target: '' apacheconf: '' apl: '' applescript: '' arduino: '' aspectj: '' aspx-cs: '' aspx-vb: '' asymptote: '' autohotkey: '' autoit: '' awk: Awk base-makefile: Make 
bash: Sh bash-session: Sh batchfile: DosBatch bbcode: '' bc: '' befunge: '' blitzbasic: Basic blitzmax: '' bnf: '' boo: '' boogie: '' brainfuck: '' bro: '' bugs: '' c: C c#: C# c++: C++ c-objdump: asm ca65-assembler: asm cadl: '' camkes: '' cbm-basic-v2: '' ceylon: Java cfengine3: '' cfstatement: '' chaiscript: '' chapel: '' cheetah: '' cirru: '' clay: '' clojure: Clojure clojurescript: Clojure cmake: Make cobol: Cobol cobolfree: Cobol coffeescript: CoffeeScript coldfusion-cfc: HTML coldfusion-html: HTML common-lisp: Lisp component-pascal: Pascal coq: '' cpp-objdump: Asm cpsa: '' crmsh: Sh croc: '' cryptol: '' csound-document: '' csound-orchestra: '' csound-score: '' css: CSS css+django/jinja: CSS css+genshi-text: CSS css+lasso: CSS css+mako: CSS css+mozpreproc: CSS css+myghty: CSS css+php: CSS css+ruby: CSS css+smarty: CSS cuda: '' cypher: '' cython: Python d: D d-objdump: Asm darcs-patch: Diff dart: '' debian-control-file: '' debian-sourcelist: '' delphi: '' dg: '' diff: Diff django/jinja: Python docker: Iniconf dtd: '' duel: '' dylan: '' dylan-session: '' dylanlid: '' earl-grey: '' easytrieve: '' ebnf: '' ec: '' ecl: '' eiffel: Eiffel elixir: '' elixir-iex-session: '' elm: '' emacslisp: Lisp embedded-ragel: '' erb: Ruby erlang: Erlang erlang-erl-session: Erlang evoque: '' ezhil: '' factor: '' fancy: '' fantom: '' felix: '' fish: '' fortran: Fortran fortranfixed: Fortran nfoxpro: '' fsharp: Ocaml gap: '' gas: '' genshi: '' genshi-text: '' gettext-catalog: '' gherkin: '' glsl: '' gnuplot: '' go: Go golo: '' gooddata-cl: '' gosu: '' gosu-template: '' groff: '' groovy: '' haml: '' handlebars: '' haskell: '' haxe: '' hexdump: '' html: HTML html+cheetah: HTML html+django/jinja: HTML html+evoque: HTML html+genshi: HTML html+handlebars: HTML html+lasso: HTML html+mako: HTML html+myghty: HTML html+php: HTML html+smarty: HTML html+twig: HTML html+velocity: HTML http: '' hxml: '' hy: Lisp hybris: '' idl: '' idris: '' igor: '' inform-6: '' inform-6-template: '' inform-7: '' 
ini: Iniconf io: '' ioke: '' irc-logs: '' isabelle: '' j: '' jade: '' jags: '' jasmin: '' java: Java java-server-page: Java javascript: JavaScript javascript+cheetah: JavaScript javascript+django/jinja: JavaScript javascript+genshi-text: JavaScript javascript+lasso: JavaScript javascript+mako: JavaScript javascript+mozpreproc: JavaScript javascript+myghty: JavaScript javascript+php: JavaScript javascript+ruby: JavaScript javascript+smarty: JavaScript jcl: '' json: JSON json-ld: JSON julia: '' julia-console: '' kal: '' kconfig: '' koka: '' kotlin: '' lasso: '' lean: '' lesscss: CSS lighttpd-configuration-file: Iniconf limbo: '' liquid: '' literate-agda: '' literate-cryptol: '' literate-haskell: '' literate-idris: '' livescript: '' llvm: '' logos: '' logtalk: '' lsl: '' lua: Lua makefile: Make mako: '' maql: '' mask: '' mason: '' mathematica: MatLab matlab: MatLab matlab-session: MatLab minid: '' modelica: '' modula-2: '' moinmoin/trac-wiki-markup: '' monkey: '' moocode: '' moonscript: '' mozhashpreproc: '' mozpercentpreproc: '' mql: '' mscgen: '' msdos-session: '' mupad: '' mxml: '' myghty: '' mysql: SQL nasm: Asm nemerle: '' nesc: '' newlisp: Lisp newspeak: '' nginx-configuration-file: '' nimrod: '' nit: '' nix: '' nsis: '' numpy: '' objdump: Asm objdump-nasm: Asm objective-c: ObjectiveC objective-c++: Objective-C objective-j: '' ocaml: Ocaml octave: '' odin: '' ooc: '' opa: '' openedge-abl: '' pacmanconf: '' pan: '' parasail: '' pawn: '' perl: Perl perl6: Perl6 php: PHP pig: '' pike: '' pkgconfig: '' pl/pgsql: SQL postgresql-console-(psql): '' postgresql-sql-dialect: SQL postscript: '' povray: '' powershell: '' powershell-session: '' praat: '' prolog: '' properties: Iniconf protocol-buffer: Protobuf puppet: '' pypy-log: '' python: Python python-3: Python python-3.0-traceback: Python python-console-session: Python python-traceback: Python qbasic: '' qml: '' qvto: '' racket: LISP ragel: '' ragel-in-c-host: '' ragel-in-cpp-host: '' ragel-in-d-host: '' 
ragel-in-java-host: '' ragel-in-objective-c-host: '' ragel-in-ruby-host: '' raw-token-data: '' rconsole: '' rd: '' rebol: '' red: '' redcode: '' reg: '' resourcebundle: '' restructuredtext: reStructuredText rexx: REXX rhtml: '' roboconf-graph: '' roboconf-instances: '' robotframework: '' rpmspec: RpmSpec rql: '' rsl: '' ruby: Ruby ruby-irb-session: Sh rust: Rust s: '' sass: '' scala: Java scalate-server-page: '' scaml: SML scheme: Lisp scilab: '' scss: '' shen: '' slim: '' smali: '' smalltalk: '' smarty: '' snobol: '' sourcepawn: '' sparql: '' sql: SQL sqlite3con: SQL squidconf: '' stan: '' standard-ml: SML supercollider: '' swift: '' swig: '' systemverilog: SystemVerilog tads-3: '' tap: '' tcl: '' tcsh: Sh tcsh-session: Sh tea: '' termcap: '' terminfo: '' terraform: '' tex: Tex text-only: '' thrift: '' todotxt: '' trafficscript: '' treetop: '' turtle: '' twig: '' typescript: '' urbiscript: '' vala: '' vb.net: Basic vctreestatus: '' velocity: '' verilog: Verilog vgl: '' vhdl: VHDL viml: Vim x10: '' xml: '' xml+cheetah: '' xml+django/jinja: '' xml+evoque: '' xml+lasso: '' xml+mako: '' xml+myghty: '' xml+php: '' xml+ruby: '' xml+smarty: '' xml+velocity: '' xquery: '' xslt: XSLT xtend: '' xul+mozpreproc: '' yaml: '' yaml+jinja: '' zephir: Zephir unknown: '' swh::deploy::worker::vault_cooker::config_file: "%{hiera('swh::conf_directory')}/vault_cooker.yml" swh::deploy::worker::vault_cooker::concurrency: 20 swh::deploy::worker::vault_cooker::loglevel: info swh::deploy::worker::vault_cooker::conf_file: "%{hiera('swh::conf_directory')}/vault/cooker.yml" swh::deploy::worker::vault_cooker::config: storage: "%{alias('swh::remote_service::storage::config')}" vault: "%{alias('swh::remote_service::vault::config::writable')}" celery: task_broker: "%{alias('swh::deploy::worker::task_broker')}" task_modules: - swh.vault.cooking_tasks task_queues: - swh.vault.cooking_tasks.SWHCookingTask - swh.vault.cooking_tasks.SWHBatchCookingTask max_bundle_size: 1073741824 # 1GiB 
desktop::printers: MFP_C: uri: lpd://print.paris.inria.fr/MFP_C-pro description: Impression couleur location: Partout ppd: "%{hiera('desktop::printers::ppd_dir')}/MFP_Paris.ppd" ppd_options: ColorType: Color MFP: uri: lpd://print.paris.inria.fr/MFP-pro description: Impression Noir et Blanc location: Partout ppd: "%{hiera('desktop::printers::ppd_dir')}/MFP_Paris.ppd" ppd_options: ColorType: Mono desktop::printers::default: MFP desktop::printers::ppd_dir: /usr/share/ppd/softwareheritage desktop::printers::cups_usernames: ardumont: andumont morane: mgruenpe olasd: ndandrim seirl: apietri zack: zacchiro zookeeper::clusters: rocquencourt: 11: esnode1.internal.softwareheritage.org 12: esnode2.internal.softwareheritage.org 13: esnode3.internal.softwareheritage.org azure: 1: kafka01.euwest.azure.internal.softwareheritage.org 2: kafka02.euwest.azure.internal.softwareheritage.org 3: kafka03.euwest.azure.internal.softwareheritage.org 4: kafka04.euwest.azure.internal.softwareheritage.org 5: kafka05.euwest.azure.internal.softwareheritage.org 6: kafka06.euwest.azure.internal.softwareheritage.org zookeeper::datastore: /var/lib/zookeeper zookeeper::client_port: 2181 zookeeper::election_port: 2888 zookeeper::leader_port: 3888 kafka::version: '2.2.0' kafka::scala_version: '2.12' kafka::mirror_url: https://mirrors.ircam.fr/pub/apache/ kafka::logdirs: - /srv/kafka/logdir kafka::broker_config: log.dirs: "%{alias('kafka::logdirs')}" num.recovery.threads.per.data.dir: 10 kafka::clusters: rocquencourt: zookeeper::chroot: '/kafka/softwareheritage' zookeeper::servers: - esnode1.internal.softwareheritage.org - esnode2.internal.softwareheritage.org - esnode3.internal.softwareheritage.org brokers: esnode1.internal.softwareheritage.org: id: 11 esnode2.internal.softwareheritage.org: id: 12 esnode3.internal.softwareheritage.org: id: 13 broker::heap_opts: "-Xmx4G -Xms4G" azure: zookeeper::chroot: '/kafka/softwareheritage' zookeeper::servers: - kafka01.euwest.azure.internal.softwareheritage.org - 
kafka02.euwest.azure.internal.softwareheritage.org - kafka03.euwest.azure.internal.softwareheritage.org - kafka04.euwest.azure.internal.softwareheritage.org - kafka05.euwest.azure.internal.softwareheritage.org - kafka06.euwest.azure.internal.softwareheritage.org brokers: kafka01.euwest.azure.internal.softwareheritage.org: id: 1 kafka02.euwest.azure.internal.softwareheritage.org: id: 2 kafka03.euwest.azure.internal.softwareheritage.org: id: 3 kafka04.euwest.azure.internal.softwareheritage.org: id: 4 kafka05.euwest.azure.internal.softwareheritage.org: id: 5 kafka06.euwest.azure.internal.softwareheritage.org: id: 6 broker::heap_opts: "-Xmx1G -Xms1G" # Real exported files from munin stats_export::export_path: "/var/www/stats.export.softwareheritage.org" stats_export::export_file: "%{hiera('stats_export::export_path')}/history_counters.json" # Exposed through the following host's apache venv stats_export::vhost::name: stats.export.softwareheritage.org stats_export::vhost::docroot: "/var/www/%{hiera('stats_export::vhost::name')}" stats_export::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}" stats_export::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" stats_export::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" stats_export::vhost::hsts_header: "%{hiera('apache::hsts_header')}" icinga2::role: agent icinga2::master::zonename: master icinga2::master::db::username: icinga2 # icinga2::master::db::password in private data icinga2::master::db::database: icinga2 icinga2::icingaweb2::db::username: icingaweb2 # icinga2::icingaweb2::db::password in private data icinga2::icingaweb2::db::database: icingaweb2 icinga2::icingaweb2::protected_customvars: - "*pw*" - "*pass*" - community - http_auth_pair icinga2::icingaweb2::vhost::name: icinga.softwareheritage.org icinga2::icingaweb2::vhost::aliases: - icinga.internal.softwareheritage.org icinga2::icingaweb2::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}" 
icinga2::icingaweb2::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" icinga2::icingaweb2::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" icinga2::icingaweb2::vhost::hsts_header: "%{hiera('apache::hsts_header')}" icinga2::parent_zone: master icinga2::parent_endpoints: pergamon.softwareheritage.org: host: 192.168.100.29 icinga2::network: "%{lookup('internal_network')}" icinga2::features: - checker - mainlog icinga2::service_configuration: load: default: load_wload1: 30 load_wload5: 28 load_wload15: 26 load_cload1: 50 load_cload5: 45 load_cload15: 40 high: load_wload1: 140 load_wload5: 120 load_wload15: 100 load_cload1: 240 load_cload5: 220 load_cload15: 200 icinga2::host::vars: os: Linux cores: "%{::processorcount}" virtual_machine: "%{::is_virtual}" distro: "%{::operatingsystem}" disks: 'disk /': disk_partitions: '/' icinga2::apiusers: root: # password in private data permissions: - '*' icinga2::exported_checks::filename: "/etc/icinga2/zones.d/%{hiera('icinga2::parent_zone')}/exported-checks.conf" systemd_journal::logstash_hosts: - 'logstash.internal.softwareheritage.org:5044' memcached::server::bind: 127.0.0.1 memcached::server::port: 11211 memcached::server::max_memory: '5%' mountpoints: /srv/storage/space: device: uffizi:/srv/storage/space fstype: nfs options: - rw - soft - intr - rsize=8192 - wsize=8192 - noauto - x-systemd.automount - x-systemd.device-timeout=10 /srv/softwareheritage/objects: device: uffizi:/srv/softwareheritage/objects fstype: nfs options: - rw - soft - intr - rsize=8192 - wsize=8192 - noauto - x-systemd.automount - x-systemd.device-timeout=10 ceph::release: luminous ceph::fsid: b3e34018-388e-499b-9579-d1c0d57e8c09 # needs to match the values of $::hostname on the ceph monitors ceph::mon_initial_members: - ceph-mon1 ceph::mon_host: - 192.168.100.170 ceph::keys: admin: secret: "%{hiera('ceph::secrets::admin')}" cap_mds: allow cap_mgr: allow * cap_mon: allow * cap_osd: allow * bootstrap-osd: secret: 
"%{hiera('ceph::secrets::bootstrap_osd')}" cap_mon: allow profile bootstrap-osd proxmox-rbd: secret: "%{hiera('ceph::secrets::proxmox_rbd')}" cap_mon: profile rbd cap_osd: profile rbd pool=rbd swh-contents: secret: "%{hiera('ceph::secrets::swh_contents')}" cap_mon: allow r cap_osd: allow r pool=swh_contents swh-contents-rw: secret: "%{hiera('ceph::secrets::swh_contents_rw')}" cap_mon: allow r cap_osd: allow rw pool=swh_contents swh-contents-test: secret: "%{hiera('ceph::secrets::swh_contents_test')}" cap_mon: allow r cap_osd: allow r pool=swh_contents_test swh-contents-test-rw: secret: "%{hiera('ceph::secrets::swh_contents_test_rw')}" cap_mon: allow r cap_osd: allow rw pool=swh_contents_test ceph::default_client_keyring: /etc/softwareheritage/ceph-keyring ceph::client_keyrings: '/etc/softwareheritage/ceph-keyring': owner: root group: swhdev mode: '0644' keys: - swh-contents - swh-contents-test swh::deploy::objstorage::ceph::keyring: "%{alias('ceph::default_client_keyring')}" swh::deploy::objstorage::ceph::pool_name: swh_contents swh::deploy::objstorage::ceph::rados_id: swh-contents swh::deploy::objstorage::ceph::config: cls: rados args: pool_name: "%{alias('swh::deploy::objstorage::ceph::pool_name')}" rados_id: "%{alias('swh::deploy::objstorage::ceph::rados_id')}" ceph_config: keyring: "%{alias('swh::deploy::objstorage::ceph::keyring')}" nginx::package_name: nginx-light nginx::accept_mutex: 'off' nginx::names_hash_bucket_size: 128 nginx::names_hash_max_size: 1024 nginx::worker_processes: "%{::processorcount}" prometheus::server::defaults_config: web: enable_admin_api: true storage: tsdb: retention: '1y' prometheus::server::config::global: scrape_interval: 1m scrape_timeout: 45s prometheus::server::listen_network: "%{lookup('internal_network')}" prometheus::server::listen_port: 9090 prometheus::server::certname: pergamon.softwareheritage.org prometheus::node::listen_network: "%{lookup('internal_network')}" prometheus::node::listen_port: 9100 
prometheus::node::textfile_directory: /var/lib/prometheus/node-exporter prometheus::node::defaults_config: collector: diskstats: ignored_devices: "^(ram|loop|fd|(h|s|v|xv)d[a-z]|nvme\\d+n\\d+p)\\d+$" filesystem: ignored_mount_points: "^/(sys|proc|dev|run|srv/softwareheritage/objects/[0-9a-f][0-9a-f])($|/)" systemd: true logind: true loadavg: true ntp: true netstat: true netdev: ignored_devices: "^lo$" textfile: directory: "%{lookup('prometheus::node::textfile_directory')}" prometheus::node::scripts::directory: /var/lib/prometheus/node-exporter-scripts prometheus::node::scripts: puppet-classes: mode: cron cron: user: root specification: minute: fqdn_rand apt: mode: cron cron: user: root specification: minute: fqdn_rand prometheus::statsd::listen_network: "%{lookup('internal_network')}" prometheus::statsd::listen_port: 9102 prometheus::statsd::defaults_config: {} prometheus::statsd::statsd_listen_tcp: 127.0.0.1:8125 prometheus::statsd::statsd_listen_udp: 127.0.0.1:8125 prometheus::statsd::mapping: defaults: timer_type: histogram buckets: - .005 - .01 - .025 - .05 - .1 - .25 - .5 - .75 - 1 - 2 - 5 - 10 - 15 - 30 - 45 - 60 - 120 - 300 - 600 - 900 - 1800 - 2700 - 3600 - 7200 prometheus::sql::listen_network: "%{lookup('internal_network')}" prometheus::sql::listen_port: 9237 prometheus::sql::config_snippets: - activity - queries - replication - wal prometheus::jmx::version: 0.11.0 prometheus::kafka::listen_network: "%{lookup('internal_network')}" prometheus::kafka::listen_port: 7071 grafana::db::database: grafana grafana::db::username: grafana # grafana::db::password in private-data grafana::backend::port: 3000 grafana::vhost::name: grafana.softwareheritage.org grafana::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}" grafana::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" grafana::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" grafana::vhost::hsts_header: "%{hiera('apache::hsts_header')}" grafana::config: app_mode: production server: 
root_url: "https://%{lookup('grafana::vhost::name')}/" http_port: "%{alias('grafana::backend::port')}" users: allow_sign_up: false auth.anonymous: enabled: true org_name: Software Heritage org_role: Viewer smtp: enabled: true skip_verify: true from_address: grafana@softwareheritage.org grafana::objects::organizations: - name: Software Heritage id: 1 grafana::objects::users: [] grafana::objects::datasources: - name: Prometheus (Pergamon) url: "http://pergamon.internal.softwareheritage.org:%{hiera('prometheus::server::listen_port')}" type: prometheus organization: 1 access_mode: proxy is_default: true java::distribution: jre jenkins::backend::url: http://thyssen.internal.softwareheritage.org:8080/ jenkins::vhost::name: jenkins.softwareheritage.org jenkins::vhost::ssl_protocol: "%{hiera('apache::ssl_protocol')}" jenkins::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" jenkins::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" jenkins::vhost::hsts_header: "%{hiera('apache::hsts_header')}" jenkins::agent::jar_url: "https://%{hiera('jenkins::vhost::name')}/jnlpJars/agent.jar" jenkins::agent::jnlp::url: "%{hiera('jenkins::backend::url')}computer/%{::swh_hostname.internal_fqdn}/slave-agent.jnlp" # jenkins::agent::jnlp::token in private_data weekly_report_bot::user: nobody weekly_report_bot::cron: minute: 0 hour: 12 weekday: fri swh::postgres::service::users: - root - zack - ardumont swh::postgres::service::dbs: - alias: swh name: "%{hiera('swh::deploy::storage::db::dbname')}" host: "%{hiera('swh::deploy::storage::db::host')}" user: "%{hiera('swh::deploy::storage::db::user')}" port: "%{hiera('swh::deploy::db::pgbouncer::port')}" passwd: "%{hiera('swh::deploy::storage::db::password')}" - alias: swh-deposit name: "%{hiera('swh::deploy::deposit::db::dbname')}" host: "%{hiera('swh::deploy::deposit::db::host')}" user: "%{hiera('swh::deploy::deposit::db::dbuser')}" port: "%{hiera('swh::deploy::db::pgbouncer::port')}" passwd: 
"%{hiera('swh::deploy::deposit::db::password')}" - alias: swh-scheduler name: "%{hiera('swh::deploy::scheduler::db::dbname')}" host: "%{hiera('swh::deploy::scheduler::db::host')}" user: "%{hiera('swh::deploy::scheduler::db::user')}" port: "%{hiera('swh::deploy::db::pgbouncer::port')}" passwd: "%{hiera('swh::deploy::scheduler::db::password')}" - alias: swh-vault name: "%{hiera('swh::deploy::vault::db::dbname')}" host: "%{hiera('swh::deploy::vault::db::host')}" user: "%{hiera('swh::deploy::vault::db::user')}" port: "%{hiera('swh::deploy::db::pgbouncer::port')}" passwd: "%{hiera('swh::deploy::vault::db::password')}" - alias: swh-lister name: "%{hiera('swh::deploy::worker::lister::db::name')}" host: "%{hiera('swh::deploy::worker::lister::db::host')}" user: "%{hiera('swh::deploy::worker::lister::db::name')}" port: "%{hiera('swh::deploy::db::pgbouncer::port')}" passwd: "%{hiera('swh::deploy::lister::db::password')}" - alias: swh-replica name: "%{hiera('swh::deploy::storage::db::dbname')}" host: somerset.internal.softwareheritage.org user: "%{hiera('swh::deploy::db::pgbouncer::user::login')}" port: "%{hiera('swh::deploy::db::pgbouncer::port')}" passwd: "%{hiera('swh::deploy::storage::db::password')}" - alias: swh-indexer name: "%{hiera('swh::deploy::indexer::storage::db::dbname')}" host: "%{hiera('swh::deploy::indexer::storage::db::host')}" user: "%{hiera('swh::deploy::indexer::storage::db::user')}" port: "%{hiera('swh::deploy::db::pgbouncer::port')}" passwd: "%{hiera('swh::deploy::indexer::storage::db::password')}" elastic::elk_version: '6.8.4' # sentry::secret_key in private-data sentry::postgres::host: db.internal.softwareheritage.org sentry::postgres::port: 5432 sentry::postgres::dbname: sentry sentry::postgres::user: sentry # sentry::postgres::password in private-data sentry::kafka_cluster: rocquencourt sentry::backend::url: http://riverside.internal.softwareheritage.org:9000/ sentry::vhost::name: sentry.softwareheritage.org sentry::vhost::ssl_protocol: 
"%{hiera('apache::ssl_protocol')}" sentry::vhost::ssl_honorcipherorder: "%{hiera('apache::ssl_honorcipherorder')}" sentry::vhost::ssl_cipher: "%{hiera('apache::ssl_cipher')}" sentry::vhost::hsts_header: "%{hiera('apache::hsts_header')}" diff --git a/manifests/site.pp b/manifests/site.pp index 521628de..e14ece70 100644 --- a/manifests/site.pp +++ b/manifests/site.pp @@ -1,173 +1,169 @@ node 'louvre.internal.softwareheritage.org' { include role::swh_server } node /^(orsay|beaubourg|hypervisor\d+)\.(internal\.)?softwareheritage\.org$/ { include role::swh_hypervisor } node 'pergamon.softwareheritage.org' { include role::swh_sysadmin include profile::export_archive_counters } node 'tate.softwareheritage.org' { include role::swh_forge } node 'moma.softwareheritage.org' { include role::swh_api } node 'webapp0.softwareheritage.org' { include role::swh_base_api } node 'saatchi.internal.softwareheritage.org' { include role::swh_scheduler } node /^(belvedere|somerset).(internal.)?softwareheritage.org$/ { include role::swh_database include profile::pgbouncer } node 'banco.softwareheritage.org' { include role::swh_backup include role::postgresql_backup } node /^esnode\d+.(internal.)?softwareheritage.org$/ { include role::swh_elasticsearch } node /^kafka\d+\./ { include role::swh_kafka_broker } node 'granet.internal.softwareheritage.org' { include role::swh_graph_backend } node /^(unibo-test).(internal.)?softwareheritage.org$/ { include role::swh_vault_test } node /^(unibo-prod|vangogh).(euwest.azure.)?(internal.)?softwareheritage.org$/ { include role::swh_vault } node /^uffizi\.(internal\.)?softwareheritage\.org$/ { include role::swh_storage_baremetal } node /^storage\d+\.[^.]+\.azure\.internal\.softwareheritage\.org$/ { include role::swh_base_storage } node /^getty.(internal.)?softwareheritage.org$/ { include role::swh_journal_orchestrator } node /^worker\d+\.(internal\.)?softwareheritage\.org$/ { include role::swh_worker_inria } node 
/^worker\d+\..*\.azure\.internal\.softwareheritage\.org$/ { include role::swh_worker_azure } node /^dbreplica(0|1)\.euwest\.azure\.internal\.softwareheritage\.org$/ { include role::swh_database } node /^ceph-osd\d+\.internal\.softwareheritage\.org$/ { include role::swh_ceph_osd } node /^ceph-mon\d+\.internal\.softwareheritage\.org$/ { include role::swh_ceph_mon } node /^ns\d+\.(.*\.azure\.)?internal\.softwareheritage\.org/ { include role::swh_nameserver_secondary } node 'thyssen.internal.softwareheritage.org' { include role::swh_ci_server } node 'riverside.internal.softwareheritage.org' { include role::swh_sentry } node /^jenkins-debian\d+\.internal\.softwareheritage\.org$/ { include role::swh_ci_agent_debian } node 'logstash0.internal.softwareheritage.org' { include role::swh_logstash_instance } node 'kibana0.internal.softwareheritage.org' { include role::swh_kibana_instance } -node 'munin0.internal.softwareheritage.org' { - include role::swh_munin_master -} - node 'giverny.softwareheritage.org' { include role::swh_desktop } node 'db0.internal.staging.swh.network' { include role::swh_base_database include profile::postgresql::server include profile::pgbouncer include ::profile::devel::postgres } node 'scheduler0.internal.staging.swh.network' { include role::swh_scheduler include ::profile::devel::postgres } node 'gateway.internal.staging.swh.network' { include role::swh_gateway } node 'storage0.internal.staging.swh.network' { include role::swh_base_storage include ::profile::devel::postgres } node /^worker\d\.internal\.staging\.swh\.network$/ { include role::swh_worker_inria } node 'webapp.internal.staging.swh.network' { include role::swh_base_api include profile::network } node 'deposit.internal.staging.swh.network' { include role::swh_deposit include profile::postgresql::server include profile::pgbouncer include ::profile::devel::postgres } node 'vault.internal.staging.swh.network' { include role::swh_vault } node 'journal0.internal.staging.swh.network' { 
include role::swh_journal_allinone } node default { include role::swh_base include profile::puppet::agent } diff --git a/site-modules/profile/files/munin/rabbitmq/rabbitmq_connections b/site-modules/profile/files/munin/rabbitmq/rabbitmq_connections deleted file mode 100755 index 8b003de2..00000000 --- a/site-modules/profile/files/munin/rabbitmq/rabbitmq_connections +++ /dev/null @@ -1,99 +0,0 @@ -#!/bin/bash - -: << =cut - -=head1 NAME - -rabbitmq_connections - monitor the number of connections to RabbitMQ - -=head1 CONFIGURATION - -You will need to add configuration to -/etc/munin/plugin-conf.d/rabbitmq_connection.conf for this plugin to -work. - -=over 2 - -=item C - -Required. Valid choices are C and C. This is required -by C. - -=item C - -Optional, default value is 500 - -=item C - -Optional, default value is 1000 - -=back - -=head2 EXAMPLE CONFIGURATION - - [rabbitmq_connections] - user rabbitmq - env.conn_warn 512 - env.conn_crit 1024 - -=head1 MAGIC MARKERS - - #%# family=contrib - -=cut - -case $(whoami) in - rabbitmq|root) - ;; - *) - echo 'Error: Plugin requires "user" to be set in plugin configuration.' >&2 - echo 'See "munindoc rabbitmq_connections" for more information' >&2 - exit 1 - ;; -esac - -# If run with the "config"-parameter, give out information on how the -# graphs should look. - -if [ "$1" = "config" ]; then - CONN_WARN=${conn_warn:-500} - CONN_CRIT=${conn_crit:-1000} - - # The host name this plugin is for. (Can be overridden to have - # one machine answer for several) - - # The title of the graph - echo 'graph_title RabbitMQ connections' - # Arguments to "rrdtool graph". In this case, tell it that the - # lower limit of the graph is '0', and that 1k=1000 (not 1024) - echo 'graph_args --base 1000 -l 0' - # The Y-axis label - echo 'graph_vlabel connections' - # We want Cur/Min/Avg/Max unscaled (i.e. 
0.42 load instead of - # 420 milliload) - #echo 'graph_scale no' - echo 'graph_category RabbitMQ' - - echo "connections.label Connections" - echo "connections.warning $CONN_WARN" - echo "connections.critical $CONN_CRIT" - echo "connections.info Number of active connections" - - echo 'graph_info Shows the number of connections to RabbitMQ' - # Last, if run with the "config"-parameter, quit here (don't - # display any data) - exit 0 -fi - -# If not run with any parameters at all (or only unknown ones), do the -# real work - i.e. display the data. Almost always this will be -# "value" subfield for every data field. - -if hash rabbitmqctl >/dev/null 2>&1; then - connections=$(HOME=/tmp rabbitmqctl list_connections state | grep -c running) -else - echo "$0: Could not run rabbitmqctl" >&2 - connections=U -fi - -printf "connections.value %s\n" "$connections" diff --git a/site-modules/profile/files/munin/rabbitmq/rabbitmq_consumers b/site-modules/profile/files/munin/rabbitmq/rabbitmq_consumers deleted file mode 100755 index 92a59d34..00000000 --- a/site-modules/profile/files/munin/rabbitmq/rabbitmq_consumers +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash -# -# Plugin to monitor the queues of a virtual_host in RabbitMQ -# -# Usage: Link or copy into /etc/munin/node.d/ -# -# Parameters -# env.vhost -# env.queue_warn -# env.queue_crit -# -# Magic markers (optional - only used by munin-config and some -# installation scripts): -# -#%# family=auto -#%# capabilities=autoconf - -# If run with the "autoconf"-parameter, give our opinion on whether we -# should be run on this system or not. This is optinal, and only used by -# munin-config. In the case of this plugin, we should most probably -# always be included. - -if [ "$1" = "autoconf" ]; then - echo yes - exit 0 -fi - -# If run with the "config"-parameter, give out information on how the -# graphs should look. 
- -HOME=/tmp/ -VHOST=${vhost:-"/"} -FILTER=${filter:-"^(amq\.gen-.*|celery@.*\.pidbox|celeryev\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})"} -QUEUES=$(HOME=$HOME rabbitmqctl list_queues -p $VHOST name | \ - grep -v '^Listing' | \ - grep -v 'done\.$' | \ - grep -Ev $FILTER | \ - sed -e 's/[.=-]/_/g' ) - -if [ "$1" = "config" ]; then - QUEUE_WARN=${queue_warn:-100} - QUEUE_CRIT=${queue_crit:-500} - - # The host name this plugin is for. (Can be overridden to have - # one machine answer for several) - - # The title of the graph - echo "graph_title RabbitMQ $VHOST consumers" - # Arguments to "rrdtool graph". In this case, tell it that the - # lower limit of the graph is '0', and that 1k=1000 (not 1024) - echo 'graph_args --base 1000 -l 0' - # The Y-axis label - echo 'graph_vlabel consumers' - # We want Cur/Min/Avg/Max unscaled (i.e. 0.42 load instead of - # 420 milliload) - #echo 'graph_scale no' - echo 'graph_category RabbitMQ' - - for queue in $QUEUES; do - echo "$queue.label $queue" - echo "$queue.warning $QUEUE_WARN" - echo "$queue.critical $QUEUE_CRIT" - echo "$queue.info Active consumers for $queue" - done - - echo 'graph_info Lists active consumers for a queue.' - # Last, if run with the "config"-parameter, quit here (don't - # display any data) - exit 0 -fi - -# If not run with any parameters at all (or only unknown ones), do the -# real work - i.e. display the data. Almost always this will be -# "value" subfield for every data field. 
- -HOME=$HOME rabbitmqctl list_queues -p $VHOST name consumers| \ - grep -v "^Listing" | grep -v "done.$" | grep -Ev $FILTER | \ - perl -nle'($q, $s) = split; $q =~ s/[.=-]/_/g; print("$q.value $s")' diff --git a/site-modules/profile/files/munin/rabbitmq/rabbitmq_messages b/site-modules/profile/files/munin/rabbitmq/rabbitmq_messages deleted file mode 100755 index 7c592589..00000000 --- a/site-modules/profile/files/munin/rabbitmq/rabbitmq_messages +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash -# -# Plugin to monitor the queues of a virtual_host in RabbitMQ -# -# Usage: Link or copy into /etc/munin/node.d/ -# -# Parameters -# env.vhost -# env.queue_warn -# env.queue_crit -# -# Magic markers (optional - only used by munin-config and some -# installation scripts): -# -#%# family=auto -#%# capabilities=autoconf - -# If run with the "autoconf"-parameter, give our opinion on whether we -# should be run on this system or not. This is optinal, and only used by -# munin-config. In the case of this plugin, we should most probably -# always be included. - -if [ "$1" = "autoconf" ]; then - echo yes - exit 0 -fi - -# If run with the "config"-parameter, give out information on how the -# graphs should look. - -HOME=/tmp/ -VHOST=${vhost:-"/"} -FILTER=${filter:-"^(amq\.gen-.*|celery@.*\.pidbox|celeryev\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})"} -QUEUES=$(HOME=$HOME rabbitmqctl list_queues -p $VHOST name | \ - grep -v '^Listing' | \ - grep -v 'done\.$' | \ - grep -Ev $FILTER | \ - sed -e 's/[.=-]/_/g' ) - -if [ "$1" = "config" ]; then - QUEUE_WARN=${queue_warn:-10000} - QUEUE_CRIT=${queue_crit:-20000} - - # The host name this plugin is for. (Can be overridden to have - # one machine answer for several) - - # The title of the graph - echo "graph_title RabbitMQ $VHOST list_queues" - # Arguments to "rrdtool graph". 
In this case, tell it that the - # lower limit of the graph is '0', and that 1k=1000 (not 1024) - echo 'graph_args --base 1000 -l 0' - # The Y-axis label - echo 'graph_vlabel queue_size' - # We want Cur/Min/Avg/Max unscaled (i.e. 0.42 load instead of - # 420 milliload) - #echo 'graph_scale no' - echo 'graph_category RabbitMQ' - - for queue in $QUEUES; do - echo "$queue.label $queue" - echo "$queue.warning $QUEUE_WARN" - echo "$queue.critical $QUEUE_CRIT" - echo "$queue.info Queue size for $queue" - done - - echo 'graph_info Lists how many messages are in each queue.' - # Last, if run with the "config"-parameter, quit here (don't - # display any data) - exit 0 -fi - -# If not run with any parameters at all (or only unknown ones), do the -# real work - i.e. display the data. Almost always this will be -# "value" subfield for every data field. - -HOME=$HOME rabbitmqctl list_queues -p $VHOST | \ - grep -v "^Listing" | grep -v "done.$" | grep -Ev $FILTER | \ - perl -nle'($q, $s) = split; $q =~ s/[.=-]/_/g; print("$q.value $s")' diff --git a/site-modules/profile/files/munin/rabbitmq/rabbitmq_messages_unacknowledged b/site-modules/profile/files/munin/rabbitmq/rabbitmq_messages_unacknowledged deleted file mode 100755 index 6aecfc5b..00000000 --- a/site-modules/profile/files/munin/rabbitmq/rabbitmq_messages_unacknowledged +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash -# -# Plugin to monitor the queues of a virtual_host in RabbitMQ -# -# Usage: Link or copy into /etc/munin/node.d/ -# -# Parameters -# env.vhost -# env.queue_warn -# env.queue_crit -# -# Magic markers (optional - only used by munin-config and some -# installation scripts): -# -#%# family=auto -#%# capabilities=autoconf - -# If run with the "autoconf"-parameter, give our opinion on whether we -# should be run on this system or not. This is optinal, and only used by -# munin-config. In the case of this plugin, we should most probably -# always be included. 
- -if [ "$1" = "autoconf" ]; then - echo yes - exit 0 -fi - -# If run with the "config"-parameter, give out information on how the -# graphs should look. - -HOME=/tmp/ -VHOST=${vhost:-"/"} -FILTER=${filter:-"^(amq\.gen-.*|celery@.*\.pidbox|celeryev\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})"} -QUEUES=$(HOME=$HOME rabbitmqctl list_queues -p $VHOST name | \ - grep -v '^Listing' | \ - grep -v 'done\.$' | \ - grep -Ev $FILTER | \ - sed -e 's/[.=-]/_/g' ) - -if [ "$1" = "config" ]; then - QUEUE_WARN=${queue_warn:-10000} - QUEUE_CRIT=${queue_crit:-20000} - - # The host name this plugin is for. (Can be overridden to have - # one machine answer for several) - - # The title of the graph - echo "graph_title RabbitMQ $VHOST Unacknowledged Messages" - # Arguments to "rrdtool graph". In this case, tell it that the - # lower limit of the graph is '0', and that 1k=1000 (not 1024) - echo 'graph_args --base 1000 -l 0' - # The Y-axis label - echo 'graph_vlabel unacknowledged' - # We want Cur/Min/Avg/Max unscaled (i.e. 0.42 load instead of - # 420 milliload) - #echo 'graph_scale no' - echo 'graph_category RabbitMQ' - - for queue in $QUEUES; do - echo "$queue.label $queue" - echo "$queue.warning $QUEUE_WARN" - echo "$queue.critical $QUEUE_CRIT" - echo "$queue.info Unacknowledged messages for $queue" - done - - echo 'graph_info Lists how many messages are in each queue.' - # Last, if run with the "config"-parameter, quit here (don't - # display any data) - exit 0 -fi - -# If not run with any parameters at all (or only unknown ones), do the -# real work - i.e. display the data. Almost always this will be -# "value" subfield for every data field. 
- -HOME=$HOME rabbitmqctl list_queues -p $VHOST name messages_unacknowledged | \ - grep -v "^Listing" | grep -v "done.$" | grep -Ev $FILTER | \ - perl -nle'($q, $s) = split; $q =~ s/[.=-]/_/g; print("$q.value $s")' diff --git a/site-modules/profile/files/munin/rabbitmq/rabbitmq_messages_uncommitted b/site-modules/profile/files/munin/rabbitmq/rabbitmq_messages_uncommitted deleted file mode 100755 index 56ff2357..00000000 --- a/site-modules/profile/files/munin/rabbitmq/rabbitmq_messages_uncommitted +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash -# -# Plugin to monitor the queues of a virtual_host in RabbitMQ -# -# Usage: Link or copy into /etc/munin/node.d/ -# -# Parameters -# env.vhost -# env.queue_warn -# env.queue_crit -# -# Magic markers (optional - only used by munin-config and some -# installation scripts): -# -#%# family=auto -#%# capabilities=autoconf - -# If run with the "autoconf"-parameter, give our opinion on whether we -# should be run on this system or not. This is optinal, and only used by -# munin-config. In the case of this plugin, we should most probably -# always be included. - -if [ "$1" = "autoconf" ]; then - echo yes - exit 0 -fi - -# If run with the "config"-parameter, give out information on how the -# graphs should look. - -HOME=/tmp/ -VHOST=${vhost:-"/"} -FILTER=${filter:-"^(amq\.gen-.*|celery@.*\.pidbox|celeryev\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})"} -QUEUES=$(HOME=$HOME rabbitmqctl list_queues -p $VHOST name | \ - grep -v '^Listing' | \ - grep -v 'done\.$' | \ - grep -Ev $FILTER | \ - sed -e 's/[.=-]/_/g' ) - -if [ "$1" = "config" ]; then - QUEUE_WARN=${queue_warn:-10000} - QUEUE_CRIT=${queue_crit:-20000} - - # The host name this plugin is for. (Can be overridden to have - # one machine answer for several) - - # The title of the graph - echo "graph_title RabbitMQ $VHOST Uncommitted Messages" - # Arguments to "rrdtool graph". 
In this case, tell it that the - # lower limit of the graph is '0', and that 1k=1000 (not 1024) - echo 'graph_args --base 1000 -l 0' - # The Y-axis label - echo 'graph_vlabel uncommitted' - # We want Cur/Min/Avg/Max unscaled (i.e. 0.42 load instead of - # 420 milliload) - #echo 'graph_scale no' - echo 'graph_category RabbitMQ' - - for queue in $QUEUES; do - echo "$queue.label $queue" - echo "$queue.warning $QUEUE_WARN" - echo "$queue.critical $QUEUE_CRIT" - echo "$queue.info Uncommitted messages for $queue" - done - - echo 'graph_info Lists how many messages are in each queue.' - # Last, if run with the "config"-parameter, quit here (don't - # display any data) - exit 0 -fi - -# If not run with any parameters at all (or only unknown ones), do the -# real work - i.e. display the data. Almost always this will be -# "value" subfield for every data field. - -HOME=$HOME rabbitmqctl list_channels -p $VHOST name messages_uncommitted | \ - grep -v "^Listing" | grep -v "done.$" | grep -Ev $FILTER | \ - perl -nle'($q, $s) = split; $q =~ s/[.=-]/_/g; print("$q.value $s")' diff --git a/site-modules/profile/files/munin/rabbitmq/rabbitmq_queue_memory b/site-modules/profile/files/munin/rabbitmq/rabbitmq_queue_memory deleted file mode 100755 index 36433197..00000000 --- a/site-modules/profile/files/munin/rabbitmq/rabbitmq_queue_memory +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash -# -# Plugin to monitor the queues of a virtual_host in RabbitMQ -# -# Usage: Link or copy into /etc/munin/node.d/ -# -# Parameters -# env.vhost -# env.queue_warn -# env.queue_crit -# -# Magic markers (optional - only used by munin-config and some -# installation scripts): -# -#%# family=auto -#%# capabilities=autoconf - -# If run with the "autoconf"-parameter, give our opinion on whether we -# should be run on this system or not. This is optinal, and only used by -# munin-config. In the case of this plugin, we should most probably -# always be included. 
- -if [ "$1" = "autoconf" ]; then - echo yes - exit 0 -fi - -# If run with the "config"-parameter, give out information on how the -# graphs should look. - -HOME=/tmp/ -VHOST=${vhost:-"/"} -FILTER=${filter:-"^(amq\.gen-.*|celery@.*\.pidbox|celeryev\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})"} -QUEUES=$(HOME=$HOME rabbitmqctl list_queues -p $VHOST name | \ - grep -v '^Listing' | \ - grep -v 'done\.$' | \ - grep -Ev $FILTER | \ - sed -e 's/[.=-]/_/g' ) - -if [ "$1" = "config" ]; then - QUEUE_WARN=${queue_warn:-104857600} # 100 MB - QUEUE_CRIT=${queue_crit:-209715200} # 200 MB - - # The host name this plugin is for. (Can be overridden to have - # one machine answer for several) - - # The title of the graph - echo "graph_title RabbitMQ $VHOST Memory used by queue" - # Arguments to "rrdtool graph". In this case, tell it that the - # lower limit of the graph is '0', and that 1k=1000 (not 1024) - echo 'graph_args --base 1024 --vertical-label Bytes -l 0' - # The Y-axis label - echo 'graph_vlabel memory' - # We want Cur/Min/Avg/Max unscaled (i.e. 0.42 load instead of - # 420 milliload) - #echo 'graph_scale no' - echo 'graph_category RabbitMQ' - - for queue in $QUEUES; do - echo "$queue.label $queue" - echo "$queue.warning $QUEUE_WARN" - echo "$queue.critical $QUEUE_CRIT" - echo "$queue.info Memory used by $queue" - done - - echo 'graph_info Show memory usage by queue' - # Last, if run with the "config"-parameter, quit here (don't - # display any data) - exit 0 -fi - -# If not run with any parameters at all (or only unknown ones), do the -# real work - i.e. display the data. Almost always this will be -# "value" subfield for every data field. 
- -HOME=$HOME rabbitmqctl list_queues -p $VHOST name memory | \ - grep -v "^Listing" | grep -v "done.$" | \ - perl -nle'($q, $s) = split; $q =~ s/[.=-]/_/g; print("$q.value $s")' diff --git a/site-modules/profile/manifests/apache/common.pp b/site-modules/profile/manifests/apache/common.pp index 6e3451aa..f1c5ab28 100644 --- a/site-modules/profile/manifests/apache/common.pp +++ b/site-modules/profile/manifests/apache/common.pp @@ -1,5 +1,4 @@ class profile::apache::common { include ::apache include ::apache::mod::status - include ::profile::munin::plugins::apache } diff --git a/site-modules/profile/manifests/munin/master.pp b/site-modules/profile/manifests/munin/master.pp index 01040b20..06dda9e4 100644 --- a/site-modules/profile/manifests/munin/master.pp +++ b/site-modules/profile/manifests/munin/master.pp @@ -1,64 +1,8 @@ # Munin master class class profile::munin::master { $master_hostname = lookup('munin::master::hostname') - $master_hostname_domain = join(delete_at(split($master_hostname, '[.]'), 0), '.') - $master_hostname_target = "${::hostname}.${master_hostname_domain}." 
- - class { '::munin::master': - extra_config => ["cgiurl_graph http://$master_hostname"], - } - - include ::profile::apache::common - include ::apache::mod::rewrite - include ::apache::mod::fcgid - - $docroot = '/var/www/html' - - file {$docroot: - ensure => directory, - owner => 'www-data', - group => 'www-data', - mode => '0755', - } apache::vhost { $master_hostname: - port => 80, - docroot => $docroot, - rewrites => [ - { - comment => 'static resources', - rewrite_rule => [ - '^/favicon.ico /etc/munin/static/favicon.ico [L]', - '^/static/(.*) /etc/munin/static/$1 [L]', - ], - }, - { - comment => 'HTML', - rewrite_cond => [ - '%{REQUEST_URI} .html$ [or]', - '%{REQUEST_URI} =/', - ], - rewrite_rule => [ - '^/(.*) /usr/lib/munin/cgi/munin-cgi-html/$1 [L]', - ], - }, - { - comment => 'Images', - rewrite_rule => [ - '^/munin-cgi/munin-cgi-graph/(.*) /usr/lib/munin/cgi/munin-cgi-graph/$1 [L]', - '^/(.*) /usr/lib/munin/cgi/munin-cgi-graph/$1 [L]', - ], - }, - ], - directories => [ - { 'path' => '/usr/lib/munin/cgi', - 'options' => '+ExecCGI', - 'sethandler' => 'fcgid-script' - }, - { 'path' => "${export_path}", - 'options' => '+Indexes', # allow listing - } - ], + ensure => 'absent', } - } diff --git a/site-modules/profile/manifests/munin/node.pp b/site-modules/profile/manifests/munin/node.pp index 6db0cfb8..6e79dd56 100644 --- a/site-modules/profile/manifests/munin/node.pp +++ b/site-modules/profile/manifests/munin/node.pp @@ -1,41 +1,16 @@ -# Munin node class +# Purge the munin node configuration class profile::munin::node { - $munin_node_allow = lookup('munin::node::allow') - $munin_node_network = lookup('munin::node::network') - $munin_node_plugins_disable = lookup('munin::node::plugins::disable', Array, 'unique') - $munin_node_plugins_enable = lookup('munin::node::plugins::enable', Array, 'unique') - - class { '::munin::node': - allow => $munin_node_allow, - address => ip_for_network($munin_node_network), - bind_address => ip_for_network($munin_node_network), - 
masterconfig => [ - '', - '# The apt plugin doesn\'t graph by default. Let\'s make it.', - 'apt.graph yes', - 'apt.graph_category system', - 'apt.graph_vlabel Total Packages', - '', - '# Move the libvirt plugins to a spaceless category', - 'libvirt_blkstat.graph_category virtualization', - 'libvirt_cputime.graph_category virtualization', - 'libvirt_ifstat.graph_category virtualization', - 'libvirt_mem.graph_category virtualization', - ], + service {'munin-node': + ensure => stopped, + enable => false, } - - munin::plugin { $munin_node_plugins_enable: - ensure => link, + -> package {'munin-node': + ensure => purged, } - munin::plugin { $munin_node_plugins_disable: - ensure => absent, - } - - file_line { 'disable munin-node cron mail': - ensure => present, - path => '/etc/cron.d/munin-node', - line => 'MAILTO=""', - match => '^MAILTO=', - require => Package['munin-node'], + -> file {'/etc/munin': + ensure => absent, + recurse => true, + purge => true, + force => true, } } diff --git a/site-modules/profile/manifests/munin/plugins/apache.pp b/site-modules/profile/manifests/munin/plugins/apache.pp deleted file mode 100644 index a28bb1f4..00000000 --- a/site-modules/profile/manifests/munin/plugins/apache.pp +++ /dev/null @@ -1,13 +0,0 @@ -# Munin plugins for Apache - -class profile::munin::plugins::apache { - munin::plugin { 'apache_volume': - ensure => link, - } - munin::plugin { 'apache_accesses': - ensure => link, - } - munin::plugin { 'apache_processes': - ensure => link, - } -} diff --git a/site-modules/profile/manifests/munin/plugins/postgresql.pp b/site-modules/profile/manifests/munin/plugins/postgresql.pp deleted file mode 100644 index ddb85b95..00000000 --- a/site-modules/profile/manifests/munin/plugins/postgresql.pp +++ /dev/null @@ -1,60 +0,0 @@ -class profile::munin::plugins::postgresql { - munin::plugin { 'postgres_autovacuum': - ensure => link, - } - munin::plugin { 'postgres_bgwriter': - ensure => link, - } - munin::plugin { 'postgres_cache_ALL': - ensure 
=> link, - target => 'postgres_cache_', - } - munin::plugin { 'postgres_checkpoints': - ensure => link, - } - munin::plugin { 'postgres_connections_ALL': - ensure => link, - target => 'postgres_connections_', - } - munin::plugin { 'postgres_connections_db': - ensure => link, - } - munin::plugin { 'postgres_locks_ALL': - ensure => link, - target => 'postgres_locks_' - } - munin::plugin { 'postgres_querylength_ALL': - ensure => link, - target => 'postgres_querylength_', - } - munin::plugin { 'postgres_scans_ALL': - ensure => link, - target => 'postgres_scans_', - } - munin::plugin { 'postgres_size_ALL': - ensure => link, - target => 'postgres_size_', - } - munin::plugin { 'postgres_streaming_ALL': - ensure => link, - target => 'postgres_streaming_', - } - munin::plugin { 'postgres_transactions_ALL': - ensure => link, - target => 'postgres_transactions_', - } - munin::plugin { 'postgres_tuples_ALL': - ensure => link, - target => 'postgres_tuples_', - } - munin::plugin { 'postgres_users': - ensure => link, - } - munin::plugin { 'postgres_xlog': - ensure => link, - } - - package { 'libdbd-pg-perl': - ensure => 'present', - } -> Munin::Plugin <| |> -} diff --git a/site-modules/profile/manifests/munin/plugins/rabbitmq.pp b/site-modules/profile/manifests/munin/plugins/rabbitmq.pp deleted file mode 100644 index e8c0ca08..00000000 --- a/site-modules/profile/manifests/munin/plugins/rabbitmq.pp +++ /dev/null @@ -1,41 +0,0 @@ -class profile::munin::plugins::rabbitmq { - $messages_warn = lookup('munin::plugins::rabbitmq::messages_warn') - $messages_crit = lookup('munin::plugins::rabbitmq::messages_crit') - $queue_memory_warn = lookup('munin::plugins::rabbitmq::queue_memory_warn') - $queue_memory_crit = lookup('munin::plugins::rabbitmq::queue_memory_crit') - - munin::plugin { - 'rabbitmq_connections': - ensure => present, - source => 'puppet:///modules/profile/munin/rabbitmq/rabbitmq_connections', - config => ['user root']; - 'rabbitmq_consumers': - ensure => present, - source => 
'puppet:///modules/profile/munin/rabbitmq/rabbitmq_consumers', - config => ['user root']; - 'rabbitmq_messages': - ensure => present, - source => 'puppet:///modules/profile/munin/rabbitmq/rabbitmq_messages', - config => [ - 'user root', - "env.queue_warn ${messages_warn}", - "env.queue_crit ${messages_crit}", - ]; - 'rabbitmq_messages_unacknowledged': - ensure => present, - source => 'puppet:///modules/profile/munin/rabbitmq/rabbitmq_messages_unacknowledged', - config => ['user root']; - 'rabbitmq_messages_uncommitted': - ensure => present, - source => 'puppet:///modules/profile/munin/rabbitmq/rabbitmq_messages_uncommitted', - config => ['user root']; - 'rabbitmq_queue_memory': - ensure => present, - source => 'puppet:///modules/profile/munin/rabbitmq/rabbitmq_queue_memory', - config => [ - 'user root', - "env.queue_warn ${queue_memory_warn}", - "env.queue_crit ${queue_memory_crit}", - ]; - } -} diff --git a/site-modules/profile/manifests/rabbitmq.pp b/site-modules/profile/manifests/rabbitmq.pp index 6fe4fecf..e7f5c8e0 100644 --- a/site-modules/profile/manifests/rabbitmq.pp +++ b/site-modules/profile/manifests/rabbitmq.pp @@ -1,59 +1,58 @@ class profile::rabbitmq { - include ::profile::munin::plugins::rabbitmq $rabbitmq_vhost = '/' $rabbitmq_user = lookup('rabbitmq::monitoring::user') $rabbitmq_password = lookup('rabbitmq::monitoring::password') $users = lookup('rabbitmq::server::users') class { 'rabbitmq': service_manage => true, port => 5672, admin_enable => true, node_ip_address => '0.0.0.0', interface => '0.0.0.0', config_variables => { vm_memory_high_watermark => 0.6, }, heartbeat => 0, } -> rabbitmq_vhost { $rabbitmq_vhost: provider => 'rabbitmqctl', } each ( $users ) | $user | { $username = $user['name'] rabbitmq_user { $username: admin => $user['is_admin'], password => $user['password'], tags => $user['tags'], provider => 'rabbitmqctl', } -> rabbitmq_user_permissions { "${username}@${rabbitmq_vhost}": configure_permission => '.*', read_permission => '.*', 
write_permission => '.*', provider => 'rabbitmqctl', } } # monitoring user for the icinga check $icinga_checks_file = lookup('icinga2::exported_checks::filename') @@::icinga2::object::service {"rabbitmq-server on ${::fqdn}": service_name => 'rabbitmq server', import => ['generic-service'], host_name => $::fqdn, check_command => 'rabbitmq_server', vars => { rabbitmq_port => 15672, rabbitmq_vhost => $rabbitmq_vhost, rabbitmq_node => $::hostname, rabbitmq_user => $rabbitmq_user, rabbitmq_password => $rabbitmq_password, }, target => $icinga_checks_file, tag => 'icinga2::exported', } } diff --git a/site-modules/role/manifests/swh_database.pp b/site-modules/role/manifests/swh_database.pp index 23738995..e8f87cb4 100644 --- a/site-modules/role/manifests/swh_database.pp +++ b/site-modules/role/manifests/swh_database.pp @@ -1,5 +1,4 @@ class role::swh_database inherits role::swh_base_database { - include profile::munin::plugins::postgresql include profile::postgresql include profile::megacli } diff --git a/site-modules/role/manifests/swh_munin_master.pp b/site-modules/role/manifests/swh_munin_master.pp deleted file mode 100644 index cf9f33f5..00000000 --- a/site-modules/role/manifests/swh_munin_master.pp +++ /dev/null @@ -1,4 +0,0 @@ -class role::swh_munin_master inherits role::swh_server { - include profile::puppet::agent - include profile::munin::master -} diff --git a/site-modules/role/manifests/swh_sysadmin.pp b/site-modules/role/manifests/swh_sysadmin.pp index fb927beb..79940f33 100644 --- a/site-modules/role/manifests/swh_sysadmin.pp +++ b/site-modules/role/manifests/swh_sysadmin.pp @@ -1,28 +1,27 @@ class role::swh_sysadmin inherits role::swh_server { include profile::network include profile::prometheus::server include profile::grafana include profile::prometheus::sql include profile::puppet::master include profile::letsencrypt include profile::icinga2::icingaweb2 include profile::apache::simple_server include ::apache::mod::rewrite include 
profile::bind_server::primary - include profile::munin::plugins::postgresql include profile::annex_web include profile::stats_web include profile::docs_web include profile::debian_repository include profile::sentry::reverse_proxy include profile::weekly_report_bot } diff --git a/site-modules/role/manifests/swh_vault_test.pp b/site-modules/role/manifests/swh_vault_test.pp index 6c7e188c..a259e2c5 100644 --- a/site-modules/role/manifests/swh_vault_test.pp +++ b/site-modules/role/manifests/swh_vault_test.pp @@ -1,11 +1,10 @@ class role::swh_vault_test inherits role::swh_server { include profile::puppet::agent include profile::swh::deploy::vault include profile::swh::deploy::worker - include profile::munin::plugins::postgresql include profile::postgresql include profile::swh::deploy::objstorage }