diff --git a/.cookiecutter.context.yml b/.cookiecutter.context.yml new file mode 100644 index 0000000..e30830b --- /dev/null +++ b/.cookiecutter.context.yml @@ -0,0 +1,7 @@ +customer_name: geo_11 +odoo_version: '11.0' +customer_shortname: geo_11 +repo_name: geo_11 +project_id: '0000' +project_name: geo_11_odoo +odoo_company_name: geo_11 diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..07e1103 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +HISTORY.rst merge=union diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7e59fc2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,18 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] + +# Translations +*.mo + +# backup files +*.~* + +# project specific +rancher/*/rancher.env + +# local dev requirements +odoo/dev_requirements.txt + +# local dev composition +docker-compose.override.yml diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..3af78cd --- /dev/null +++ b/.gitmodules @@ -0,0 +1,146 @@ +[submodule "odoo/src"] + path = odoo/src + url = git@github.com:OCA/OCB.git + branch = 11.0 + +; Camptocamp +; [submodule "odoo/external-src/odoo-dj"] +; path = odoo/external-src/odoo-dj +; url = git@github.com:camptocamp/odoo-dj.git +; branch = 11.0 +;[submodule "odoo/external-src/odoo-cloud-platform"] +; path = odoo/external-src/odoo-cloud-platform +; url = git@github.com:camptocamp/odoo-cloud-platform.git +; branch = 11.0 + +; Enterprise +;[submodule "odoo/external-src/enterprise"] +; path = odoo/external-src/enterprise +; url = git@github.com:odoo/enterprise.git +; branch = 11.0 + +; OCA +[submodule "odoo/external-src/server-tools"] + path = odoo/external-src/server-tools + url = git@github.com:OCA/server-tools.git + branch = 11.0 +[submodule "odoo/external-src/web"] + path = odoo/external-src/web + url = git@github.com:OCA/web.git + branch = 11.0 +[submodule "odoo/external-src/geospatial"] + path = odoo/external-src/geospatial + url = 
git@github.com:OCA/geospatial.git + branch = 11.0 +;[submodule "odoo/external-src/account-closing"] +; path = odoo/external-src/account-closing +; url = git@github.com:OCA/account-closing.git +; branch = 11.0 +;[submodule "odoo/external-src/account-analytic"] +; path = odoo/external-src/account-analytic +; url = git@github.com:OCA/account-analytic.git +; branch = 11.0 +;[submodule "odoo/external-src/account-financial-reporting"] +; path = odoo/external-src/account-financial-reporting +; url = git@github.com:OCA/account-financial-reporting.git +; branch = 11.0 +;[submodule "odoo/external-src/account-financial-tools"] +; path = odoo/external-src/account-financial-tools +; url = git@github.com:OCA/account-financial-tools.git +; branch = 11.0 +;[submodule "odoo/external-src/account-invoicing"] +; path = odoo/external-src/account-invoicing +; url = git@github.com:OCA/account-invoicing.git +; branch = 11.0 +;[submodule "odoo/external-src/bank-payment"] +; path = odoo/external-src/bank-payment +; url = git@github.com:OCA/bank-payment.git +; branch = 11.0 +;[submodule "odoo/external-src/bank-statement-reconcile"] +; path = odoo/external-src/bank-statement-reconcile +; url = git@github.com:OCA/bank-statement-reconcile.git +; branch = 11.0 +;[submodule "odoo/external-src/carrier-delivery"] +; path = odoo/external-src/carrier-delivery +; url = git@github.com:OCA/carrier-delivery.git +; branch = 11.0 +;[submodule "odoo/external-src/intrastat"] +; path = odoo/external-src/intrastat +; url = git@github.com:OCA/intrastat.git +; branch = 11.0 +;[submodule "odoo/external-src/partner-contact"] +; path = odoo/external-src/partner-contact +; url = git@github.com:OCA/partner-contact.git +; branch = 11.0 +;[submodule "odoo/external-src/pos"] +; path = odoo/external-src/pos +; url = git@github.com:OCA/pos.git +; branch = 11.0 +;[submodule "odoo/external-src/product-attribute"] +; path = odoo/external-src/product-attribute +; url = git@github.com:OCA/product-attribute.git +; branch = 11.0 
+;[submodule "odoo/external-src/product-variant"] +; path = odoo/external-src/product-variant +; url = git@github.com:OCA/product-variant.git +; branch = 11.0 +;[submodule "odoo/external-src/purchase-workflow"] +; path = odoo/external-src/purchase-workflow +; url = git@github.com:OCA/purchase-workflow.git +; branch = 11.0 +;[submodule "odoo/external-src/rma"] +; path = odoo/external-src/rma +; url = git@github.com:OCA/rma.git +; branch = 11.0 +;[submodule "odoo/external-src/sale-workflow"] +; path = odoo/external-src/sale-workflow +; url = git@github.com:OCA/sale-workflow.git +; branch = 11.0 +;[submodule "odoo/external-src/report-print-send"] +; path = odoo/external-src/report-print-send +; url = git@github.com:OCA/report-print-send.git +; branch = 11.0 +;[submodule "odoo/external-src/reporting-engine"] +; path = odoo/external-src/reporting-engine +; url = git@github.com:OCA/reporting-engine.git +; branch = 11.0 +;[submodule "odoo/external-src/stock-logistics-workflow"] +; path = odoo/external-src/stock-logistics-workflow +; url = git@github.com:OCA/stock-logistics-workflow.git +; branch = 11.0 +;[submodule "odoo/external-src/stock-logistics-warehouse"] +; path = odoo/external-src/stock-logistics-warehouse +; url = git@github.com:OCA/stock-logistics-warehouse.git +; branch = 11.0 + +; Localizations +;[submodule "odoo/external-src/l10n-france"] +; path = odoo/external-src/l10n-france +; url = git@github.com:OCA/l10n-france.git +; branch = 11.0 +;[submodule "odoo/external-src/l10n-switzerland"] +; path = odoo/external-src/l10n-switzerland +; url = git@github.com:OCA/l10n-switzerland.git +; branch = 11.0 + +; Connectors +;[submodule "odoo/external-src/queue"] +; path = odoo/external-src/queue +; url = git@github.com:OCA/queue +; branch = 11.0 +;[submodule "odoo/external-src/connector"] +; path = odoo/external-src/connector +; url = git@github.com:OCA/connector +; branch = 11.0 +;[submodule "odoo/external-src/connector-ecommerce"] +; path = 
odoo/external-src/connector-ecommerce +; url = git@github.com:OCA/connector-ecommerce +; branch = 11.0 +;[submodule "odoo/external-src/connector-magento"] +; path = odoo/external-src/connector-magento +; url = git@github.com:OCA/connector-magento +; branch = 11.0 +;[submodule "odoo/external-src/connector-prestashop"] +; path = odoo/external-src/connector-prestashop +; url = git@github.com:OCA/connector-prestashop +; branch = 11.0 diff --git a/.sync.yml b/.sync.yml new file mode 100644 index 0000000..c6ced5c --- /dev/null +++ b/.sync.yml @@ -0,0 +1,25 @@ +# List of files to sync +sync: + include: + - ./.cookiecutter.context.yml + - ./tasks/* + - ./docs/* + - ./travis/docker-compose.yml + - ./travis/git_submodule_update.py + - ./travis/minion-client.py + - ./travis/publish.sh + - ./odoo/before-migrate-entrypoint.d/* + - ./odoo/start-entrypoint.d/* + - ./odoo/local-src/camptocamp_tools/* + - ./odoo/local-src/camptocamp_website_tools/* + - ./cups-test-server/* + - ./Dangerfile + - ./Gemfile + - ./Gemfile.lock + exclude: + - ./docs/README.local.md + comment: | + This file has been generated with 'invoke project.sync'. + Do not modify. Any manual change will be lost. + Please propose your modification on + https://github.com/camptocamp/odoo-template instead. 
diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..8f681e3 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,86 @@ +sudo: required + +services: docker + +git: + # we don't want travis to pull them for the lint build, + # so we pull them manually in the tests build + submodules: false + +addons: + apt: + packages: + # used for pylint + - python-lxml + +cache: + directories: + - $HOME/.cachedb + +env: + matrix: + - LINT_CHECK="1" + - TESTS="1" + global: + - DOCKER_COMPOSE_VERSION="1.17.1" + - TESTS="0" LINT_CHECK="0" + - ADDONS_DIR="${TRAVIS_BUILD_DIR}/odoo/local-src" + - SONGS_DIR="${TRAVIS_BUILD_DIR}/odoo/songs" + - COMPOSE_PROJECT_NAME=geo_11 + - GENERATED_IMAGE="${COMPOSE_PROJECT_NAME}_odoo" + - DOCKER_HUB_REPO="camptocamp/${GENERATED_IMAGE}" + - TRAVIS_COMPOSE="travis/docker-compose.yml" + - RANCHER_MINION_SERVER="https://${COMPOSE_PROJECT_NAME}.odoo-test.camptocamp.ch" + +before_install: + - git config --global user.name "TravisCI" + - git config --global user.email "travis@camptocamp.com" + # Generate a md5 from submodules to identify the dump. + # We will generate a new dump each time any submodule changes + - if [ "$TESTS" == "1" ] ; then export SUBS_MD5=$(git submodule status | md5sum | cut -d ' ' -f1) ; fi + # Only create cached dump on commits on master and not on tags. + # Database dumps will land on the master's cache only. 
+ - if [ "$TESTS" == "1" -a "$TRAVIS_PULL_REQUEST" == "false" -a -z "$TRAVIS_TAG" ] ; then export CREATE_DB_CACHE="true" ; fi + - if [ "$LINT_CHECK" == "1" ] ; then pip install --user -q flake8 pylint pylint_odoo; fi + - if [ "$LINT_CHECK" == "0" ] ; then pip install --user -q gitpython requests pyOpenSSL ndg-httpsclient pyasn1 pyyaml; fi + +install: + - if [ "$TESTS" == "1" ] ; then bundle install ; fi + - if [ "$LINT_CHECK" == "0" ] ; then ./travis/git_submodule_update.py ; fi + # we always specify '-f docker-compose.yml' so 'docker-compose.override.yml' + # is not used and so the volumes are not shared with the host + - if [ "$LINT_CHECK" == "0" ] ; then docker-compose -f $TRAVIS_COMPOSE build --pull ; fi + - if [ "$TESTS" == "1" ] ; then docker-compose -f $TRAVIS_COMPOSE up -d db ; fi + - if [ "$TESTS" == "1" ] ; then docker-compose -f $TRAVIS_COMPOSE ps ; fi + +# We run the tests and migration in the same build, so we can push the docker image +# after that we validated it has tests and migration passing +script: + - if [ "$TESTS" == "1" ] ; then bundle exec danger ; fi + - if [ "$LINT_CHECK" == "1" ] ; then flake8 ${ADDONS_DIR} ${SONGS_DIR} --exclude=__init__.py ; fi + - if [ "$LINT_CHECK" == "1" ] ; then pylint --rcfile=travis/pylintrc --valid_odoo_versions=11.0 ${ADDONS_DIR}/* ; fi + # run tests on an empty database, which is dropped afterwards + - if [ "$TESTS" == "1" ] ; then docker-compose -f $TRAVIS_COMPOSE run --rm -e SUBS_MD5=${SUBS_MD5} -e CREATE_DB_CACHE=${CREATE_DB_CACHE} -e DEMO=True odoo runtests ; fi + # run the migration + # + # Notes on releases: + # + # On each release both upgrade step and install from scratch are tested + # on separate builds. (even if cache exists) + # + # When creating a release a git tag will generate 2 builds on travis: + # - one for the new tag + # - a second for the commit on master. + # + # As the tag build is used to generate a new release we want to end it fast + # to ship it quickly. 
Thus as odoo/VERSION has changed it needs to check for + # previous tag cached database. MIG_LOAD_VERSION_CEIL is there for that. + # + # However, to ensure a build from scratch is done on each new release + # the search of a previous version dump is not active on commits on master. + # This will force a complete build when odoo/VERSION has been edited and doesn't + # match a cached database dump because such dump has not been created yet. + - if [ "$TESTS" == "1" ] ; then docker-compose -f $TRAVIS_COMPOSE run --rm -e MARABUNTA_MODE=sample -e MARABUNTA_ALLOW_SERIE=True -e CREATE_DB_CACHE=${CREATE_DB_CACHE} -e MIG_LOAD_VERSION_CEIL=${TRAVIS_TAG} odoo runmigration ; fi + +after_success: + - if [ "$TESTS" == "1" ] ; then ./travis/publish.sh ; fi diff --git a/Dangerfile b/Dangerfile new file mode 100644 index 0000000..8aa1e4b --- /dev/null +++ b/Dangerfile @@ -0,0 +1,129 @@ +# -*- mode: ruby -*- +# Sometimes it's a README fix, or something like that - which isn't relevant for +# including in a project's HISTORY.rst for example +declared_trivial = (github.pr_title + github.pr_body).include? "#trivial" +impacted_files = git.modified_files | git.added_files | git.deleted_files +modified_python_files = impacted_files.select{ |i| i[/local-src\/.*\.py$/] } +modified_xml_files = impacted_files.select{ |i| i[/local-src\/.*\.xml$/] } +has_local_python_changes = !modified_python_files.empty? +has_tests_changes = !git.modified_files.grep(/tests/).empty? + +# Mainly to encourage writing up some reasoning about the PR, rather than +# just leaving a title +if github.pr_body.length < 5 + fail "Please provide a summary in the Pull Request description" +end + +# Make it more obvious that a PR is a work in progress and shouldn't be merged yet +warn("PR is headlined as Work in Progress") if github.pr_title.include? "[WIP]" +warn("PR has a 'work in progress' label on it") if github.pr_labels.include? 
"work in progress" + +# Warn when there is a big PR +warn("Big PR") if git.lines_of_code > 500 + +# we could consider using danger-changelog but then probably switch history to markdown +no_changelog_entry = !git.modified_files.include?("HISTORY.rst") +if no_changelog_entry && !declared_trivial + warn("Please consider adding a note in HISTORY.rst") +end + +### migration.yml checks +require("yaml") +migration_file = File.read("odoo/migration.yml") +# Hash.dig("a", "b") is a safer version of Hash["a"]["b"] +version_migrations = YAML.load(migration_file).dig("migration", "versions") +latest_version_migration = version_migrations[-1] if version_migrations +addons_to_upgrade = latest_version_migration.dig("addons", "upgrade") + +# TODO: currently, it's impossible to get an addons_to_upgrade list in the case when +# it is hidden under `modes` section or something similar - it works only in +# cases when `addons` section is a direct ancestor of an element of `versions` array +unless addons_to_upgrade + warn("Skipping migration.yml check cause of incompatible .yml structure" \ + " - please revise your last migration step yourself") +else + # ensure that each module is listed only once in the last migration step + # array.uniq! would return `nil` if that array held unique values at the moment of calling + warn("Last migration step contains duplicates") if addons_to_upgrade.uniq! + + # check if migration.yml is intact w/ the latest changes + # Hard check: modules having XML updates - update is *required* + # `split` following the common pattern "odoo/local-src//..." + # to get the module name out of it + xml_impact_modules = modified_xml_files.map{ |i| i.split("/")[2] } + # names of modules present in diff, though absent in an last upgrade step + upgrade_required = xml_impact_modules - addons_to_upgrade + # fail w/ informative msg if not every impacted module is present in the last migration step + unless upgrade_required.empty? 
+ migration_needed_msg = ".xml: entry in a last migration.yml is required for modules: " + migration_needed_msg += upgrade_required.join(", ") + migration_needed_msg += " // please state every local module in a migration.yml explicitly" + fail(migration_needed_msg) + end + + # Soft check: modules having Python updates - update is advised + python_impact_modules = modified_python_files.map{ |i| i.split("/")[2] } + upgrade_advised = python_impact_modules - addons_to_upgrade + # since we're done w/ `upgrade required` step now, + # we don't need to highlight those for the second time + upgrade_advised -= upgrade_required + unless upgrade_advised.empty? + migration_advised_msg = ".py: entry in a last migration.yml is advised for modules: " + migration_advised_msg += upgrade_advised.join(", ") + migration_advised_msg += " // please state every local module in a migration.yml explicitly" + warn(migration_advised_msg) + end +end + +# check if requested migration step targets the correct version +latest_released_version = File.read("odoo/VERSION").split("\n")[0] +if latest_version_migration["version"] <= latest_released_version + invalid_target_version_msg = "Last migration step targets already released version." + if upgrade_required + # then we surely need to do a next migration step + fail(invalid_target_version_msg) + else + # still it is possible that no upgrades were required since last release + warn(invalid_target_version_msg) + end +end + +if has_local_python_changes && !has_tests_changes + warn("There are changes in local addons but no new test. Please consider adding some.", sticky: false) +end + +### Submodule sanity check +# FIXME: currently is being shadowed by the original one +# https://github.com/camptocamp/odoo-template/blob/ee96d4b/%7B%7Bcookiecutter.repo_name%7D%7D/travis/git_submodule_update.py#L49 +require('git') +repo = Git.open(".") +# actual submodule paths (hashtable a.k.a. 
dict) +submodules = repo.config.select{ |k, v| k[/^submodule\.odoo\/[\w\/-]+/] } +pending_merges = (YAML.load(File.open("odoo/pending-merges.yaml")) or {}) + +def normalize_git_url(url) + url.downcase! + url.sub!("git@github.com:", "https://github.com/") if url.match(/git@github\.com:/) + url = url[0..-5] if url.match(/.*\.git$/) +end + +# pattern: |submodule.odoo/.url| => +# |0 --------- 14 ^--- ^ -5 | +for submodule_path, target_url in submodules + submodule_path = "." + submodule_path[14..-5] + if pending_merges.key?(submodule_path) + pending_repo = pending_merges[submodule_path] + target = pending_repo["target"].split()[0] + target_remote = pending_repo["remotes"][target] + if normalize_git_url(target_remote) != normalize_git_url(target_url) + fail(%{ +In .gitmodules #{submodule_path}: + remote #{target_remote} does not match + target url #{target_url} + in pending.merges.yml + }) + end + end +end + +commit_lint.check disable: [:subject_length, :subject_cap] diff --git a/Gemfile b/Gemfile new file mode 100644 index 0000000..fdbb85d --- /dev/null +++ b/Gemfile @@ -0,0 +1,5 @@ +source "https://rubygems.org" + +gem "danger" +gem "danger-commit_lint" +gem "git" diff --git a/Gemfile.lock b/Gemfile.lock new file mode 100644 index 0000000..b996bbb --- /dev/null +++ b/Gemfile.lock @@ -0,0 +1,57 @@ +GEM + remote: https://rubygems.org/ + specs: + addressable (2.5.2) + public_suffix (>= 2.0.2, < 4.0) + claide (1.0.2) + claide-plugins (0.9.2) + cork + nap + open4 (~> 1.3) + colored2 (3.1.2) + cork (0.3.0) + colored2 (~> 3.1) + danger (5.6.4) + claide (~> 1.0) + claide-plugins (>= 0.9.2) + colored2 (~> 3.1) + cork (~> 0.1) + faraday (~> 0.9) + faraday-http-cache (~> 1.0) + git (~> 1) + kramdown (~> 1.5) + no_proxy_fix + octokit (~> 4.7) + terminal-table (~> 1) + danger-commit_lint (0.0.6) + danger (~> 5.0) + faraday (0.15.2) + multipart-post (>= 1.2, < 3) + faraday-http-cache (1.3.1) + faraday (~> 0.8) + git (1.4.0) + kramdown (1.17.0) + multipart-post (2.0.0) + nap (1.1.0) 
+ no_proxy_fix (0.1.2) + octokit (4.9.0) + sawyer (~> 0.8.0, >= 0.5.3) + open4 (1.3.4) + public_suffix (3.0.2) + sawyer (0.8.1) + addressable (>= 2.3.5, < 2.6) + faraday (~> 0.8, < 1.0) + terminal-table (1.8.0) + unicode-display_width (~> 1.1, >= 1.1.1) + unicode-display_width (1.4.0) + +PLATFORMS + ruby + +DEPENDENCIES + danger + danger-commit_lint + git + +BUNDLED WITH + 1.11.2 diff --git a/HISTORY.rst b/HISTORY.rst new file mode 100644 index 0000000..5b348a8 --- /dev/null +++ b/HISTORY.rst @@ -0,0 +1,28 @@ +.. :changelog: + +.. Template: + +.. 0.0.1 (2016-05-09) +.. ++++++++++++++++++ + +.. **Features and Improvements** + +.. **Bugfixes** + +.. **Build** + +.. **Documentation** + +Release History +--------------- + +latest (unreleased) ++++++++++++++++++++ + +**Features and Improvements** + +**Bugfixes** + +**Build** + +**Documentation** diff --git a/README.md b/README.md new file mode 100644 index 0000000..d0218ba --- /dev/null +++ b/README.md @@ -0,0 +1,32 @@ +[![Build Status](https://travis-ci.com/camptocamp/geo_11_odoo.svg?token=3A3ZhwttEcmdqp7JzQb7&branch=master)](https://travis-ci.com/camptocamp/geo_11_odoo) + +# geo_11 Odoo + +**Our internal id for this project is: 0000.** + +This project uses Docker. +Travis builds a new image for each change on the branches and for each new tag. + +The images built on the master branch are built as `camptocamp/geo_11_odoo:latest`. +The images built on other branches are built as `camptocamp/geo_11_odoo:`. +The ones built from tags are built as `camptocamp/geo_11_odoo:`. + +Images are pushed on the registry only when Travis has a green build. + +When a container starts, the database is automatically created and the +migration scripts automatically run. 
+ +## Project maintenance + +Please keep this project up-to-date by: + +* ensure the `FROM` image in `odoo/Dockerfile` is the latest release +* run regularly `invoke project.sync` to retrieve the last template's changes + +## Links + +* [General documentation](./docs/README.md) +* [Local documentation](./docs/README.local.md) +* [Changelog](HISTORY.rst). +* [Minions](https://geo_11.odoo-test.camptocamp.ch) +* [Base image documentation](https://github.com/camptocamp/docker-odoo-project) diff --git a/cups-test-server/Dockerfile b/cups-test-server/Dockerfile new file mode 100644 index 0000000..38c5099 --- /dev/null +++ b/cups-test-server/Dockerfile @@ -0,0 +1,4 @@ +FROM aadl/cups-alpine +MAINTAINER Camptocamp + + diff --git a/cups-test-server/README.md b/cups-test-server/README.md new file mode 100644 index 0000000..e6c9674 --- /dev/null +++ b/cups-test-server/README.md @@ -0,0 +1,65 @@ +# CUPS Server + +## Setup + +### CUPS container + +In a brand new environment when you do `docker-compose up` you get `cups-server` service automatically. + +In an existing environment just do `docker-compose up -d cups-server`. + +### Odoo container + +If your project relies on CUPS and `base_report_to_printer` then you should already have this. +In any case, make sure you have these requirements in place: + +* install `cups` and `libcups2-dev` in `odoo/Dockerfile` +* `/odoo/external-src/report-print-send` in ADDONS_PATH +* `git@github.com:OCA/report-print-send.git` checked out in the path above + + +## CUPS Configuration + +Go to `http://localhost:6631` and type credentials `admin / secr3t`. + +Then you have to configure a printer: + +1. Go to Administration -> Printers -> click on `Add printer` + +.. image:: ./images/cups_administration.png + +2. Select `CUPS-PDF` and continue + +.. image:: ./images/cups_add_printer1.png + +3. Leave printer info as it is and make sure to enable `Share This Printer` + +.. image:: ./images/cups_add_printer2.png + +4. 
Select `generic` and continue + +.. image:: ./images/cups_set_printer_type1.png + +5. Select `Generic CUPS-PDF Printer (en)` and continue + +.. image:: ./images/cups_set_printer_type2.png + +6. Set default size to `A4` and click on `Set Default Options` + +.. image:: ./images/cups_set_printer_options.png + + +Now you can go to `Printers` and see your brand new printer. + +## Odoo Configuration + +The final step is to tell Odoo about your printer: + +1. Go to Settings -> Printers -> Create +2. Give it a name and set server as `cups-server`, default port `631` is ok +3. Save and click on `Update printers` -> your PDF printer should show up + +.. image:: ./images/odoo_printer.png + + +Your are done now. Every printed document will land into `/tmp/cups-pdf` folder. diff --git a/cups-test-server/images/cups_add_printer1.png b/cups-test-server/images/cups_add_printer1.png new file mode 100644 index 0000000..d234f9e Binary files /dev/null and b/cups-test-server/images/cups_add_printer1.png differ diff --git a/cups-test-server/images/cups_add_printer2.png b/cups-test-server/images/cups_add_printer2.png new file mode 100644 index 0000000..c1b1622 Binary files /dev/null and b/cups-test-server/images/cups_add_printer2.png differ diff --git a/cups-test-server/images/cups_administration.png b/cups-test-server/images/cups_administration.png new file mode 100644 index 0000000..01f3bba Binary files /dev/null and b/cups-test-server/images/cups_administration.png differ diff --git a/cups-test-server/images/cups_set_printer_options.png b/cups-test-server/images/cups_set_printer_options.png new file mode 100644 index 0000000..54c9915 Binary files /dev/null and b/cups-test-server/images/cups_set_printer_options.png differ diff --git a/cups-test-server/images/cups_set_printer_type1.png b/cups-test-server/images/cups_set_printer_type1.png new file mode 100644 index 0000000..f4e14a8 Binary files /dev/null and b/cups-test-server/images/cups_set_printer_type1.png differ diff --git 
a/cups-test-server/images/cups_set_printer_type2.png b/cups-test-server/images/cups_set_printer_type2.png new file mode 100644 index 0000000..cf9f432 Binary files /dev/null and b/cups-test-server/images/cups_set_printer_type2.png differ diff --git a/cups-test-server/images/odoo_printer.png b/cups-test-server/images/odoo_printer.png new file mode 100644 index 0000000..65dabd4 Binary files /dev/null and b/cups-test-server/images/odoo_printer.png differ diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..12f5519 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,84 @@ +# Root file for the dev composition. +# +# This contains the common configuration for all developers +# +# You can create a 'docker-compose.override.yml' to customize it according +# to your needs it will automatically be applied on top of this +# file when the option '-f' of docker-compose is not used + +version: '2' +services: + odoo: + build: ./odoo/ + # Enable if you use oca/connector: + # command: "odoo.py --load=web,connector" + depends_on: + - db + tty: true + stdin_open: true + ports: + - 8069 + - 8072 + volumes: + - "data-odoo:/data/odoo" + - "data-odoo-db-cache:/.cachedb" + - "data-odoo-pytest-cache:/odoo/.cache" + - "./odoo/src:/odoo/src" + - "./odoo/local-src:/odoo/local-src" + - "./odoo/external-src:/odoo/external-src" + - "./odoo/songs:/odoo/songs" + - "./odoo/migration.yml:/odoo/migration.yml" + - "./odoo/data:/odoo/data" + environment: + DB_USER: odoo + DB_PASSWORD: odoo + DB_NAME: odoodb + ADMIN_PASSWD: mijaebohb8Thae1g + RUNNING_ENV: dev + # Enable if you use oca/connector: + # ODOO_CONNECTOR_CHANNELS: root:4 + MARABUNTA_MODE: sample # could be 'full' for the db with all the data + MARABUNTA_ALLOW_SERIE: 'True' # should not be set in production + + db: + image: camptocamp/postgres:9.5 + command: -c shared_buffers=256MB -c maintenance_work_mem=256MB -c wal_buffers=8MB -c effective_cache_size=1024MB + ports: + - 5432 + environment: + POSTGRES_USER: 
odoo + POSTGRES_PASSWORD: odoo + volumes: + - "data-db:/var/lib/postgresql/data" + + nginx: + image: camptocamp/odoo-nginx:11.0-1.3.0 + depends_on: + - odoo + ports: + - 80:80 + + mailhog: + image: mailhog/mailhog + ports: + - 8025:8025 + +## If you need a CUPS server for `base_report_to_printer` +## See `cups-test-server/README.md`. + # cups-server: + # build: ./cups-test-server/ + # ports: + # - "6631:631" + # expose: + # - 631 + # volumes: + # - "/tmp/cups-pdf:/var/spool/cups-pdf:rw" + # environment: + # CUPS_USER_ADMIN: admin + # CUPS_USER_PASSWORD: secr3t + +volumes: + data-odoo: + data-db: + data-odoo-db-cache: + data-odoo-pytest-cache: diff --git a/docs/README.local.md b/docs/README.local.md new file mode 100644 index 0000000..e85a545 --- /dev/null +++ b/docs/README.local.md @@ -0,0 +1,3 @@ +# Local Documentation + +Empty. You can add an index for the documentation specific to this project. diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..cb92853 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,34 @@ +# General Documentation + +## Guides + +* [Docker pre-requisite](./prerequisites.md) +* [Docker developer guide](./docker-dev.md) +* [Docker tester guide](./docker-test.md) +* [Docker and databases](./docker-and-databases.md) +* [Structure](./structure.md) +* [Releases and versioning](./releases.md) +* [Pull Requests](./pull-requests.md) +* [Upgrade scripts](./upgrade-scripts.md) +* [Docker Images](./docker-images.md) +* [Using automated tasks with Invoke](./invoke.md) +* [Odoo Cloud Platform](https://confluence.camptocamp.com/confluence/display/BS/Odoo+Cloud+Platform+-+Technical) +* [Odoo Test Cloud Platform - Minions](https://confluence.camptocamp.com/confluence/display/BS/Odoo+Test+Platform+-+Minions+-+Technical) + +## How-to + +* [How to add a new addons repository](./how-to-add-repo.md) +* [How to add a Python or Debian dependency](./how-to-add-dependency.md) +* [How to add a language in Odoo](./how-to-add-odoo-language.md) +* 
[How to integrate an open pull request of an external repository](./how-to-integrate-pull-request.md) +* [How to connect to psql in Docker](./how-to-connect-to-docker-psql.md) +* [How to change Odoo configuration values](./how-to-set-odoo-configuration-values.md) +* [How to backup and restore volumes](./how-to-backup-and-restore-volumes.md) +* [How to use a PROD DB dump in DEV mode](./how-to-use-a-prod-db-in-dev.md) +* [How to pgBadger](./how-to-pgbadger.md) +* [How to test CUPS printing](../cups-test-server/README.md) +* [How to test SMTP in dev](./how-to-test-smtp.md) + +## Local Documentation + +* [Index for local Documentation](./README.local.md) diff --git a/docs/docker-and-databases.md b/docs/docker-and-databases.md new file mode 100644 index 0000000..c7c4e6a --- /dev/null +++ b/docs/docker-and-databases.md @@ -0,0 +1,218 @@ +# Docker and Databases + +### Working with several databases + +The Docker image only starts on one database and does not allow switching +databases at runtime. However, you can and should use several databases on your +postgres container for enabling databases for different usages or development. + +The default database name will be the one configured in the variable `DB_NAME` +in `docker-compose.yml` (usually `odoodb`). + +So if you just start a new odoo container using: + +``` +docker-compose run --rm odoo +``` + +You will work on `odoodb`. + +Now let's say you want to work on a database with odoo demo data and no marabunta migration: + +``` +docker-compose run --rm -e MIGRATE=False -e DEMO=True -e DB_NAME=odoo_demo odoo +``` + +If you want to use a dump from production restored in a db called prod, you can run: + +``` +docker-compose run --rm -e DB_NAME=prod odoo +``` + +## Inspecting databases + +### Automated task to list versions + +You can list the odoo databases in your Docker volume using the automated +invoke task : + +``` +invoke database.list-versions +[...] 
+DB Name Version Install date +======= ======= ============ +odoodb 11.2.0 2018-08-05 +prod 11.1.0 2018-07-27 +odoo_demo unknown unknown +``` + +Version and install date are stored on `marabunta_version` table of each DB. +As we specified `-e MIGRATE=False` on `odoo_demo`, no Marabunta migration was +executed, thus version and install date are unknown. + + +### Manual + +However you can also inspect the databases manually, and you should also find +your 3 databases : + +``` +$ docker-compose run --rm odoo psql -l +[...] + List of databases + Name | Owner | Encoding | Collate | Ctype | Access privileges +-----------+----------+----------+------------+------------+----------------------- + odoo | postgres | UTF8 | en_US.utf8 | en_US.utf8 | + odoodb | odoo | UTF8 | en_US.utf8 | en_US.utf8 | + prod | odoo | UTF8 | en_US.utf8 | en_US.utf8 | + odoo_demo | odoo | UTF8 | en_US.utf8 | en_US.utf8 | + postgres | postgres | UTF8 | en_US.utf8 | en_US.utf8 | + template0 | postgres | UTF8 | en_US.utf8 | en_US.utf8 | =c/postgres + + | | | | | postgres=CTc/postgres + template1 | postgres | UTF8 | en_US.utf8 | en_US.utf8 | =c/postgres + + | | | | | postgres=CTc/postgres +``` + +And you can work as you want on any of them by changing the `DB_NAME`. + + +## Backup and restore with dumps + +### Create a local dump using automated invoke task + +Dump generation is automated by invoke, so the only thing you +have to do is : + +``` +invoke database.local-dump +``` + +And a *.pg dump will be generated on your project folder. + +Moreover you can select which database you want to dump and change the +destination folder using : + +``` +invoke database.local-dump --db-name=odoodb --path=~/my_dumps +``` + +The generated dump will be named as username_projectname_datetime.pg + +### Create a local dump and share it on odoo-dump-bags + +Dumps are not only great for backups, they are also the easiest way to share a +database with someone else. 
So if you want to share one of your local database +with your colleagues, the process is totally automated and secured, so all you +have to do is: + +``` + invoke database.dump-and-share --db-name=prod-test +``` + +This will create a dump on your computer (similar as above), then encrypt it +using GPG, and finally push it on https://dump-bag.odoo.camptocamp.ch. + +When your colleague did download it, please do not forget to remove it from the +dump bag with : + +``` +invoke database.empty-my-dump-bag +``` + +### Create a dump manually + +If you have the same `pg_dump` version on your computer than the one used in the +db container (9.5 at time of writing), you can just use your local `pg_dump` +directly on the outgoing port of the db container (see [how to find the +port](how-to-connect-to-docker-psql.md)). Example: + +``` +$ pg_dump -h localhost -p 32768 --format=c -U odoo --file db.pg odoodb +``` + +Note : When using `odoo` DB User (role), keep in mind its password is `odoo` + +If you have an older version of `postgres-client`, `pg_dump` will refuse to +make a dump. An option is to update your `postgres-client`. Here is another option using a `postgres:9.5` one-off container (the `db` container +must be running): + +```bash +$ export HOST_BACKUPS=/path/of/hosts/backups # Where you want to save the backups +$ export PROJECT_NAME=project_name (the prefix of containers, volumes, networks, usually the root folder's name) + +$ docker run --rm --net=${PROJECT_NAME}_default --link ${PROJECT_NAME}_db_1:db -e PGPASSWORD=odoo -v $HOST_BACKUPS:/backup postgres:9.5 pg_dump -Uodoo --file /backup/db.pg --format=c odoodb -h db +``` + +### Restore a dump using container + +You can restore any dump without worrying too much by commands from the odoo +container. 
+
+First you will have to create an empty database with odoo user as its owner and
+giving it a name:
+```
+docker-compose run --rm odoo createdb -O odoo my_restored_database
+```
+
+Then you can use the `pg_restore` command from the odoo container passing the
+path of your dump file:
+```
+docker-compose run --rm odoo pg_restore -p 5432 -d my_restored_database < ~/my_dumps/username_projectname_datetime.pg
+```
+
+
+### Restore using local command
+
+You should always prefer the method above.
+
+If you have the same `pg_restore` version on your computer as the one used in the
+db container (9.5 at time of writing), you can just use your local `pg_restore`
+directly on the outgoing port of the db container (see [how to find the
+port](how-to-connect-to-docker-psql.md)). Example:
+
+```
+$ createdb -h localhost -p 32768 -O odoo prod
+$ pg_restore -h localhost -p 32768 -O -U odoo -j2 -d prod db.pg
+```
+
+If you have an older version of `postgres-client`, `pg_restore` will refuse to
+restore the dump. An option is to update your `postgres-client`. 
Here is another option using a `postgres:9.5` one-off container (the `db` container
must be running):
+
+```bash
+$ export HOST_BACKUPS=/path/of/hosts/backups # From where you want to restore the backup
+$ export PROJECT_NAME=project_name (the prefix of containers, volumes, networks, usually the root folder's name)
+
+$ docker run --rm --net=${PROJECT_NAME}_default --link ${PROJECT_NAME}_db_1:db -e PGPASSWORD=odoo postgres:9.5 createdb -h db -O odoo prod
+$ docker run --rm --net=${PROJECT_NAME}_default --link ${PROJECT_NAME}_db_1:db -e PGPASSWORD=odoo -v $HOST_BACKUPS:/backup postgres:9.5 pg_restore -h db -O -U odoo -j2 -d prod /backup/db.pg
+```
+
+## Drop a database
+
+Sometimes a database might be broken or you simply want to recreate a fresh instance
+from scratch, so to drop the `odoodb` database simply use:
+
+```
+docker-compose run --rm odoo dropdb odoodb
+```
+
+If somehow your image is broken, you can also use your local dropdb command,
+but you need to know which port is exposed. So you should ensure your db
+container is started:
+
+```
+docker-compose up -d db
+```
+
+Then you can retrieve the exposed port using:
+
+```
+docker-compose port db 5432 | cut -d : -f 2
+```
+
+And finally drop it using the port (replace 32768 in this example with the
+result from above) and the db name:
+
+```
+dropdb -h localhost -p 32768 -U odoo odoodb
+``` diff --git a/docs/docker-dev.md b/docs/docker-dev.md
new file mode 100644
index 0000000..4bc3652
--- /dev/null
+++ b/docs/docker-dev.md
@@ -0,0 +1,272 @@
+# Working on the project as developers
+
+## Pre-requisite
+
+Be sure to [install Docker and docker-compose](prerequisites.md) before going any further.
+
+Before starting, be aware of [the documentation of the core
+image](https://github.com/camptocamp/docker-odoo-project).
+
+## Starting, git submodules
+
+1. Clone the project
+
+    git clone git@github.com:camptocamp/geo_11_odoo.git geo_11
+
+2. Submodules
+
+    You have two options:
+
+    1. 
Clone the submodules from scratch + + ```bash + git submodule update --init + ``` + + If you have an error because a ref cannot be found, it is probably that the + remote has changes, you just need to run the following command that will update + the remote: + + ```bash + git submodule sync + ``` + + 2. Use existing cloned repositories + + The Odoo repo `odoo/src` will take quite some time if pulled from scratch. + If you already have a local checkout of one or more submodules + you can save a lot of time avoiding to download the whole repos, by doing this: + + ``` + cp -r path/to/odoo odoo/src + cp -r path/to/server-tools odoo/external-src/ + git submodule update --init + ``` + Be aware that path/to/odoo (or path/to/server-tools) has to be a local clone + of a repository, because it won't work if you copy a submodule from another project. + +## Docker + +### Build of the image + +In a development environment, building the image is rarely necessary. The +production images are built by Travis. Furthermore, In the development +environment we share the local (source code) folders with the container using +`volumes` so we don't need to `COPY` the files in the container. + +Building the image is required when: + +* you start to work on the project +* the base image (`camptocamp/odoo-project:11.0`) has been updated and you need + the new version +* the local Dockerfile has been modified (for example when dependency or addons + repository is added) + +Building the image is a simple command: + +```bash +# build the docker image locally (--pull pulls the base images before building the local image) +docker-compose build --pull +``` + +You could also first pull the base images, then run the build: + +```bash +docker-compose pull +docker-compose build +``` + +### Usage + +When you need to launch the services of the composition, you can either run them in foreground or in background. 
+ +```bash +docker-compose up +``` +Will run the services (postgres, odoo, nginx) in foreground, mixing the logs of all the services. + +```bash +docker-compose up -d +``` +Will run the services (postgres, odoo, nginx) in background. + +When it is running in background, you can show the logs of one service or all of them (mixed): + +```bash +docker-compose logs odoo # show logs of odoo +docker-compose logs postgres # show logs of postgres +docker-compose logs nginx # show logs of nginx +docker-compose logs # show all logs +``` + +And you can see the details of the running services with: + +```bash +docker-compose ps +``` + +In the default configuration, the Odoo port changes each time the service is +started. Some prefer to always have the same port, if you are one of them, you +can create your own configuration file or adapt the default one locally. + +To know the port of the running Odoo, you can use the command `docker ps` that +shows information about all the running containers or the subcommand `port`: + +```bash +docker ps +docker-compose port odoo 8069 # for the service 'odoo', ask the corresponding port for the container's 8069 port +``` + +This command can be used to open directly a browser which can be nicely aliased (see later). + +```bash +export BROWSER="chromium-browser --incognito" # or firefox --private-window +$BROWSER $(docker-compose port odoo 8069) +``` + +Last but not least, we'll see other means to run Odoo, because `docker-compose +up` is not really good when it comes to real development with inputs and +interactions such as `pdb`. + +**docker-compose exec** allows to *enter* in a already running container, which +can be handy to inspect files, check something, ... 
+
+```bash
+# show odoo configuration file (the container name is found using 'docker ps')
+docker-compose exec odoo cat /etc/odoo.cfg
+# run bash in the running odoo container
+docker-compose exec odoo bash
+```
+
+**docker run** spawns a new container for a given service, allowing the
+interactive mode, which is exactly what we want to run Odoo with pdb.
+This is probably the command you'll use the most often.
+
+The `--rm` option drops the container after usage, which is usually what we
+want.
+
+```bash
+# start Odoo (use workers=0 for dev)
+docker-compose run --rm odoo odoo --workers=0 ... additional arguments
+# start Odoo and expose the port 8069 to the host on the port 80
+docker-compose run --rm -p 80:8069 odoo odoo
+# open an odoo shell
+docker-compose run --rm odoo odoo shell
+```
+
+`workers=0` lets you use your `pdb` interactive mode without trouble, otherwise
+you will have to deal with one trace per worker that caught a breakpoint.
+Plus, it will stop the annoying `bus is not available` errors.
+
+### Handy aliases
+
+Finally, a few alias suggestions:
+
+```bash
+alias doco='docker-compose'
+alias docu='docker-compose up -d'
+alias docl='docker-compose logs'
+alias docsh='docker-compose run --rm odoo odoo shell'
+alias dood='docker-compose run --rm odoo odoo'
+alias bro='chromium-browser --incognito $(docker-compose port odoo 8069)'
+# run anthem song. Just run `dood_anthem songs.install.foo::baz`
+alias dood_anthem='docker-compose run --rm odoo anthem'
+# run odoo w/ connector jobrunner. Just run `dood_conn` instead of dood (connector v9)
+alias dood_conn='docker-compose run --rm odoo odoo --workers=0 --load=web,connector'
+# run odoo w/ queue_job jobrunner. Just run `dood_queue` instead of dood (connector v10)
+alias dood_queue='docker-compose run --rm odoo odoo --workers=0 --load=web,queue_job'
+# run odoo without marabunta migration. 
Just run `dood_nomig` +alias dood_nomig='docker-compose run --rm -e MIGRATE=False odoo odoo --workers=0' +``` + +and to speed up your testing sessions (see [core images' test doc](https://github.com/camptocamp/docker-odoo-project#running-tests)): + +```bash +# setup test database. Just run `dood_test_setup` +alias dood_test_setup='docker-compose run --rm -e DB_NAME=testdb odoo testdb-gen -i base' +# reuse testdb and install or update modules on demand. Just run `dood_test_update -i/u something` +alias dood_test_update='docker-compose run --rm -e DB_NAME=testdb odoo testdb-update' +# run tests using pytest. Just run `dood_test_run path/to/your/module` +# NOTE: you need to run dood_test_update 1st IF xml or models have been updated +alias dood_test_run='docker-compose run --rm -e DB_NAME=testdb odoo pytest -s' +# run tests using std odoo test machinery (eg: you need an HttpCase). Just run `dood_test_run_odoo -u module` +alias dood_test_run_odoo='docker-compose run --rm -e DEMO=True -e DB_NAME=testdb -e MIGRATE=False odoo odoo --workers=0 --test-enable --stop-after-init' +``` + +Usage of the aliases / commands: +```bash + +# Start all the containers in background +docu + +# Show status of containers +doco ps + +# show logs of odoo or postgres +docl odoo +docl db + +# run a one-off command in a container +doco run --rm odoo bash + +# open a chromium browser on the running odoo +bro + +# stop all the containers +doco stop + +# upgrade module and or run tests +dood -u my_module [--test-enable] + +# if you are using the `connector` remember to pass the `load` attribute +dood --load=web,connector +``` + +### Working with several databases + +This section has been moved to : [working-with-several-databases](docker-and-databases.md#working-with-several-databases). + + +### Extra dev docker composition + +You might want to customize your docker composition like adding a container or setting specific ports. 
+For this use `docker-compose.override.yml` file which will always be loaded unless `-f` option of docker-compose +is used. + +Example: + +``` +# content of docker-compose.override.yml +version: '2' + +services: + odoo: + environment: + WORKERS=0 +``` + + +### Extra dev packages + +You might want to use additional python packages while developing (eg: pdbpp, ipdb, etc). +You can easily add them in `odoo/dev_requirements.txt` and build again odoo container: + +```bash +echo "pdbpp" >> odoo/dev_requirements.txt +doco build odoo +``` + + +### Troubleshooting + +``` +pkg_resources.DistributionNotFound: The 'odoo==10.0' distribution was not found and is required by the application +``` + +This error can happen after switching Odoo version in the same project. +You should then manually delete the `.egg-info` and `__pycache__` folders : + +``` +sudo rm -rf 'find -name *.egg-info' +sudo rm -rf 'find -name __pycache__' +``` diff --git a/docs/docker-images.md b/docs/docker-images.md new file mode 100644 index 0000000..9eec81e --- /dev/null +++ b/docs/docker-images.md @@ -0,0 +1,36 @@ +# Automated Docker Images + +## Travis deployment + +When Travis runs, it builds a Docker image and runs the tests inside it. +If the tests pass, it uploads the image to DockerHub and generate a new test +instance on the [test platform](./odoo-test-platform.md). + +## Rancher templates + +### Test instances + +See [Odoo Test Plaftorm](./odoo-test-platform.md) + +### Integration and production instances + +The Rancher templates for the integration and production instances are grouped in a project +for the platform: + +* https://github.com/camptocamp/odoo-cloud-platform-ch-rancher-templates + +## Docker images + +Docker images for Odoo are generated and pushed to [Docker Hub](https://hub.docker.com) by Travis when builds are successful. +This push is done in [travis/publish.sh](../travis/publish.sh) which is called by [travis.yml](../.travis.yml) in `after_success` section. 
+ +This script will tag docker image with: + * latest: When the build was triggered by a commit on master + * `git tag name`: When the build was triggered after a new tag is pushed. + * a tag generated with the git commit, used by the test instances + +To be able to push an image, Travis must have access to your project on Docker Hub. +Please be sure that the [Hub user has been created and configured in +Travis](https://confluence.camptocamp.com/confluence/display/BS/Technical+details+on+creating+new+project) + +**From there, each travis successful build on master or on tags will build a docker image and push it to Docker Hub** diff --git a/docs/docker-test.md b/docs/docker-test.md new file mode 100644 index 0000000..74d82c1 --- /dev/null +++ b/docs/docker-test.md @@ -0,0 +1,36 @@ +# How to run a test server, the short way + +This method is mostly for project managers or functional testers because it uses the pre-generated Docker images. Developers will prefer to use [Docker in development mode](docker-dev.md). + +## Pre-requisite + +Be sure to [install Docker and docker-compose](prerequisite.md) before going any further. + +## Steps + +1. Clone the project + + git clone git@github.com:camptocamp/geo_11_odoo.git geo_11 + +2. Login to Docker Hub (create an account on https://hub.docker.com/ if you + don't have one yet and ask to be added to the Camptocamp team because this + is a private project) + + docker login + # answer to the questions + +3. Start the composition + + cd geo_11 + docker-compose -f docker-compose.yml -f test.yml pull + docker-compose -f docker-compose.yml -f test.yml up + +4. Open a browser on http://localhost (only one odoo instance at a time can be + started because it uses the port 80, this can be changed in the + configuration file if needed) + +4. In `test.yml` you might want to adapt the odoo `image` version (so replace `latest` by a specific tag or branch). + +5. 
If you want to drop your database, run: + + docker-compose -f docker-compose.yml -f test.yml odoo dropdb odoodb diff --git a/docs/how-to-add-dependency.md b/docs/how-to-add-dependency.md new file mode 100644 index 0000000..444df88 --- /dev/null +++ b/docs/how-to-add-dependency.md @@ -0,0 +1,42 @@ +# Adding dependencies + +## How to add a Python package + +The Python dependencies for Odoo are already installed in the base container +(camptocamp/odoo-project:11.0) used for this project. At times, you might need to add an additional dependency required solely for this project, here are the steps. + +If the file `odoo/requirements.txt` exists, skip to number 3. + +1. Create the file `odoo/requirements.txt` +2. Add the following lines in `odoo/Dockerfile` to instruct Docker to *copy* the requirements in the image and to *install* them with `pip`: + + ``` + COPY ./requirements.txt ./ + RUN pip install -r requirements.txt + ``` + +3. Add the Python package in `odoo/requirements.txt` +4. Build again your Docker image: `docker-compose build odoo` + +You can also [add dev requirements](./docker-dev.md#extra-dev-packages) which are used on your dev machine but never +committed. + + +## How to add a Debian package + +Edit `odoo/Dockerfile` and add the following lines: + +``` +RUN set -x; \ + apt-get update \ + && apt-get install -y --no-install-recommends \ + <> \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* +``` + +If a similar command already exists. just add your package in +your list. +The cleanup at the end is important is it reduces the final size of the built image. + +Once the package added, you have to build again your local Docker image using `docker-compose build odoo` diff --git a/docs/how-to-add-odoo-language.md b/docs/how-to-add-odoo-language.md new file mode 100644 index 0000000..f18639f --- /dev/null +++ b/docs/how-to-add-odoo-language.md @@ -0,0 +1,40 @@ +# Adding language in Odoo + +## Why my language is not installable with my container? 
+
+PO files are not added to the docker image.
+By default only fr.po and de.po files are preserved when generating
+docker images.
+
+This allows us to optimize the size of the docker images.
+
+To give you an idea of the gain:
+
+```bash
+odoo/src
+*.po 285 MB
+fr.po 1.28 MB
+de.po 982 kB
+```
+
+## .dockerignore edition
+
+To add a language edit the `odoo/.dockerignore` file
+
+```
+**/i18n/*.po
+# installable lang
+!**/i18n/de.po
+!**/i18n/fr.po
+```
+
+`**/i18n/*.po`
+filters all files with extension .po that are located
+in an `i18n` folder
+
+`!` is to exclude a path definition from ignore list, it is important to
+place an exclusion after the main rule or it will have no effect.
+
+See https://docs.docker.com/engine/reference/builder/#dockerignore-file for more details.
+
+Once you have edited that file, rebuild your image and it will contain your new language. diff --git a/docs/how-to-add-repo.md b/docs/how-to-add-repo.md
new file mode 100644
index 0000000..df8595a
--- /dev/null
+++ b/docs/how-to-add-repo.md
@@ -0,0 +1,55 @@
+# How to add a new addons repository
+
+External addons repositories such as the OCA ones are integrated in
+the project using git submodules.
+
+To add a new one, you only have to add the submodule:
+
+```
+git submodule add -b 11.0 git@github.com:OCA/sale-workflow.git odoo/external-src/sale-workflow
+git add odoo/external-src/sale-workflow
+```
+
+And to add it in the `ADDONS_PATH` environment variable of the
+[Dockerfile](../odoo/Dockerfile). As the `Dockerfile` is modified, a rebuild is
+required.
+
+Then commit the new submodule
+
+```
+git add odoo/Dockerfile
+git commit -m"..."
+```
+
+# How to update an already installed addon repository
+
+**Warning:** This part of the documentation is valid only when you have no pending-merges
+(see [How to integrate an open pull request of an external repository](./how-to-integrate-pull-request.md)). 
+ +In this explanation, we are taking for example the following situation: +- There is a submodule: `my_nice_submodule` +- `my_nice_submodule` points to a specific commit in the branch `11.0` +- 2 new commits have been added to it with 10 files updated +- we want to update `my_nice_submodule` with it + +In short, updating a submodule means moving the commit reference to a new one. + +With cli you want to take latest commits +``` +cd odoo/external-src/my_nice_submodule +git checkout 11.0 +git pull +``` +Then you want to commit them +``` +cd .. +git add . +git commit -m"..." +``` + +You can now push it to your fork and open a pull-request + +**Nota Bene:** +When you have your pull-request opened on github, you should see github listing the update +of the submodule with 10 files updated. If you see directly the update of the files, it means +that the folder has been added directly to the main git repository instead of a submodule. diff --git a/docs/how-to-backup-and-restore-volumes.md b/docs/how-to-backup-and-restore-volumes.md new file mode 100644 index 0000000..80bb55c --- /dev/null +++ b/docs/how-to-backup-and-restore-volumes.md @@ -0,0 +1,34 @@ +# Backup and restore Docker Volumes + +## Backup the db and filestore (as volumes) + +```bash +$ export HOST_BACKUPS=/path/of/hosts/backups # Where you want to save the backups +$ export DATAODOO_VOLUME=project_data-odoo # Exact name to find with 'docker volume ls' +$ export DATADB_VOLUME=project_data-db # Exact name to find with 'docker volume ls' + +$ docker run --rm -v "$DATAODOO_VOLUME:/data/odoo" -v $HOST_BACKUPS:/backup debian tar cvzf /backup/backup-dataodoo.tar.gz /data/odoo +$ docker run --rm -v "$DATADB_VOLUME:/var/lib/postgresql/data" -v $HOST_BACKUPS:/backup debian tar cvzf /backup/backup-datadb.tar.gz /var/lib/postgresql/data +``` + +## Restore the db and filestore (as volumes) + +```bash +$ export HOST_PROJECT=/path/of/hosts/project # Where your docker-compose.yml is +$ export 
HOST_BACKUPS=/path/of/hosts/backups # Where you want to save the backups +$ export DATAODOO_VOLUME=project_data-odoo # Exact name to find with 'docker volume ls' +$ export DATADB_VOLUME=project_data-db # Exact name to find with 'docker volume ls' + +$ cd $HOST_PROJECT +$ docker-compose stop + +$ docker volume rm $DATAODOO_VOLUME +$ docker volume rm $DATADB_VOLUME + +$ docker run --rm -v "$DATAODOO_VOLUME:/data/odoo" -v $HOST_BACKUPS:/backup debian bash -c "tar xvzf /backup/backup-dataodoo.tar.gz" +$ docker run --rm -v "$DATADB_VOLUME:/var/lib/postgresql/data" -v $HOST_BACKUPS:/backup debian bash -c "tar xvzf /backup/backup-datadb.tar.gz" +``` + +## Backup and restore with dumps + +This section has been moved to [backup-and-restore-with-dumps](docker-and-databases.md#backup-and-restore-with-dumps). diff --git a/docs/how-to-connect-to-docker-psql.md b/docs/how-to-connect-to-docker-psql.md new file mode 100644 index 0000000..aa41678 --- /dev/null +++ b/docs/how-to-connect-to-docker-psql.md @@ -0,0 +1,42 @@ +# How to connect to psql in Docker + +We'll describe 3 ways to connect to `psql` in the Docker container. + +## Use the psql client of the Odoo container + +In the Odoo container, there is a `psql` client so you can start an one-off +container running it: + +``` +docker-compose run --rm odoo psql +``` + +It will automatically connect on the current database, but you can +specify another database if wanted: + +``` +docker-compose run --rm odoo psql other_db +``` + +## Use the random docker port of the db container and your local `psql` client + +``` +PGPASSWORD=odoo psql -h localhost -p $(docker-compose port db 5432 | cut -d : -f 2) -U odoo odoodb +``` + +## Set an explicit fixed port + +Edit `docker-compose.yml` or create your own configuration file, but put a fixed port for the Postgres server: + +``` +db: + ... 
+ ports: + - "5440:5432" +``` + +And connect to this fixed port: + +``` +PGPASSWORD=odoo psql -h localhost -p 5440 -U odoo odoodb +``` diff --git a/docs/how-to-integrate-pull-request.md b/docs/how-to-integrate-pull-request.md new file mode 100644 index 0000000..2a5bda2 --- /dev/null +++ b/docs/how-to-integrate-pull-request.md @@ -0,0 +1,109 @@ +# How to integrate an open pull request of an external repository + +First, ensure that you have `git-aggregator`: + +```python +pip install git-aggregator +``` + +External addons repositories such as the OCA ones are integrated in the project +using git submodules (see [How to add a new addon repository](./how-to-add-repo.md)). +When we need to integrate a pull request that is not yet merged in the base branch +of that external repository we want to use, we create a consolidated branch that +we push on the fork at github.com/camptocamp. + +The list of all pending merges for a project is kept in `odoo/pending-merges.yaml`. +This file contains a section for each external repository with a list of pull request +to integrate. It is used to rebuild the consolidated branches at any moment using git-aggregator. + +For each repository, we maintain a branch named +`merge-branch--master` (look in `odoo/pending-merges.yaml` for the +exact name) which must be updated by someone each time the pending merges +reference file has been modified. +When we finalize a release, we create a new branch +`pending-merge--` to ensure we keep a stable branch. + +You can also create a `pending-merge--` for particular +needs. + +## Adding a new pending merge + +Beware with pending merge branches. It is easy to override a previously pushed +branch and have a submodule referencing a commit that do no longer exist. + +1. 
Edit `odoo/pending-merge.yaml` file, add your pull request number in a section, + if the section does not exist, add it: + + ```yaml + ./external-src/sale-workflow: + remotes: + oca: https://github.com/OCA/sale-workflow.git + camptocamp: https://github.com/camptocamp/sale-workflow.git + merges: + - oca 11.0 + # comment explaining what the PR does (42 is the number of the PR) + - oca refs/pull/42/head + # you have to replace here + target: camptocamp merge-branch--master + ``` + +2. Rebuild and push the consolidation branch for the modified branch: + + ``` + invoke submodule.merges odoo/external-src/sale-workflow + ``` + +3. If there was no pending merge for that branch before, you have to edit the `.gitmodules` file, + replacing the remote by the camptocamp's one and if a branch is specified it needs to be removed + or changed : + + ``` + [submodule "odoo/external-src/sale-workflow"] + path = odoo/external-src/sale-workflow + - url = git@github.com:OCA/sale-workflow.git + + url = git@github.com:camptocamp/sale-workflow.git + - branch = 10.0 + + branch = merge-branch--master + ``` + +4. Commit the changes and create a pull request for the change + +## Notes + +1. We usually always want the same `target` name for all the repositories, so you can use + YAML variables to write it only once, example: + + ```yaml + ./external-src/bank-payment: + ... + target: &default_target camptocamp merge-branch-0000-master + ./external-src/sale-workflow: + ... + target: *default_target + ``` + +2. If you are working on another branch than `master`, you'll need to change the name of the branch in the target. + +## Merging only one distinct commit (cherry-pick) + +Sometimes you only want to merge one commit into the consolidated branch (after +merging pull requests or not). To do so you have to add a `shell_command_after` block +in the corresponding section. 
Here is an example : + + ```yaml + ./external-src/enterprise: + remotes: + odoo: git@github.com:odoo/enterprise.git + camptocamp: git@github.com:camptocamp/enterprise.git + merges: + - odoo + target: *default_target + shell_command_after: + # Commit from ? Doing what ? + - git am "$(git format-patch -1 6563606f066792682a16936f704d0bdf4bc8429f -o ../patches)" + ``` + +In the previous example the commit numbered 6563606... is searched in all the remotes of the section, +then a patch file is made and apply to the consolidated branch. +A file containing the patch will be saved in the patches directory and needs to be added in the commit +of the project. diff --git a/docs/how-to-pgbadger.md b/docs/how-to-pgbadger.md new file mode 100644 index 0000000..1cec2d1 --- /dev/null +++ b/docs/how-to-pgbadger.md @@ -0,0 +1,48 @@ +# How to use pgBadger to optimize database queries + +This guide assumes you have a local database running. +It explains how to extract logs and feed them to pgBadger then you can find slow queries or other database issues. + +## Extract logs + +1. Configure PostgreSQL to generate logs to analyze, log in the database + + ``` + $ docker-compose run --rm odoo psql + ``` + And run the following `ALTER SYSTEM` commands: + + ``` + ALTER SYSTEM SET log_min_duration_statement = '0'; + ALTER SYSTEM SET log_line_prefix = '%t [%p]: [%l-1] '; + ALTER SYSTEM SET log_checkpoints = 'on'; + ALTER SYSTEM SET log_connections = 'on'; + ALTER SYSTEM SET log_disconnections = 'on'; + ALTER SYSTEM SET log_lock_waits = 'on'; + ALTER SYSTEM SET log_temp_files = '0'; + ALTER SYSTEM SET log_autovacuum_min_duration = '0'; + ALTER SYSTEM SET log_error_verbosity = 'default'; + ``` + +2. Restart the db container + + ``` + $ docker-compose restart db + ``` + +3. At this point, PostgreSQL generates a lot of logs. This is the time to log into Odoo and do the actions you want to analyze. + +4. Extract the odoo logs to a local file that will be feeded to pgBadger. 
+ + ``` + $ mkdir -p logs/out + $ docker logs --since=2017-04-26T10:37:00.000000000Z _db_1 2> logs/logs.txt + ``` + Note: the time here is UTC, you can prefer to use your local time such as: `2017-04-26T12:37:00.000000000+02:00` + +5. Generate the pgBadger analysis and open it in a browser + + ``` + $ docker run --rm -v "$(pwd)/logs:/logs" -v "$(pwd)/logs/out:/data" uphold/pgbadger /logs/logs.txt + $ xdg-open logs/out/out.html + ``` diff --git a/docs/how-to-set-odoo-configuration-values.md b/docs/how-to-set-odoo-configuration-values.md new file mode 100644 index 0000000..1369382 --- /dev/null +++ b/docs/how-to-set-odoo-configuration-values.md @@ -0,0 +1,26 @@ +# How to set Odoo configuration values + +The template for the configuration (`openerp.cfg`) is in [the base Docker project image] + + +Most of the values here are set by environment variables, which looks like: + +``` +workers = { default .Env.WORKERS "4" } +``` + +In this example, the number of workers is taken from the `WORKERS` environment +variable and the default values is 4. + +Having environment variables is useful to have a different configuration +between different environments (dev, test, ...). This is why the `addons_path` +is directly set in the template and not in environment variables: we always +want the same value for all the environments. + +The values of the environment variables can be changed in the +`docker-compose.yml` file, or directly in `docker-compose run -e VARIABLE=value +odoo`. + +Read the documentation of [the base Docker project +image](https://github.com/camptocamp/docker-odoo-project) for more information +about the `DEMO` flag and other special variables. 
diff --git a/docs/how-to-setup-migration-project.md b/docs/how-to-setup-migration-project.md new file mode 100644 index 0000000..a55b675 --- /dev/null +++ b/docs/how-to-setup-migration-project.md @@ -0,0 +1,174 @@ +# How to setup a migration project + +Summary: + +* [Build steps](#build-steps) +* [Tools/Scripts to help the developer](#toolsscripts-to-help-the-developer) +* [Requirements/dependencies](#requirementsdependencies) + +--- + +## Build steps + +1. [Reset state for all modules](#reset-state-for-all-modules) +2. [Fix attachments path](#fix-attachments-path) +3. [Rename modules](#rename-modules) +4. [Update moved models](#update-moved-models) +5. [Update moved fields](#update-moved-fields) +6. [Install/Update all modules](#installupdate-all-modules) +7. [Uninstall modules](#uninstall-modules) +8. [Clean the database](#clean-the-database) +9. [Clean unavailable modules](#clean-unavailable-modules) + +All these steps are launched from [migration.yml](../odoo/migration.yml) +in marabunta mode `migration`. + +### Reset state for all modules + +In migrated database: + +* all modules installed in source version have the state `to upgrade` +* all new core modules to install have the state `to install` + +We can't install/update a module unavailable in target code source. + +So, we need to change the state of all modules to `uninstalled`. +And then, list in [migration.yml](../odoo/migration.yml) only modules we want to keep. + +_Implementation_: [songs/migration/pre.sql](../odoo/songs/migration/pre.sql) + +### Fix attachments path + +In migrated database, we have 2 types of attachments: + +* attachments stored directly in database in binary mode +* attachments stored in filestore + +For filestore attachments, +it's necessary to customize the file path depending of the hosting. + +With S3/SWIFT hosting, +the bucket/container name must be added at the beginning of the path. 
+
+_Implementation_: [songs/migration/pre.py](../odoo/songs/migration/pre.py)
+in function `fix_path_on_attachments`.
+
+### Rename modules
+
+Some specific/OCA modules can be renamed between source and target version.
+In this case, the module metadata must be updated before launching
+the update of all modules to avoid build failures or loss of data.
+
+You don’t need to update the models contained in the renamed modules,
+when the module is renamed, all the models, fields, etc. will be renamed too.
+
+_Implementation_: [songs/migration/pre.py](../odoo/songs/migration/pre.py)
+in function `rename_modules`.
+
+### Update moved models
+
+Some models can be moved in other modules between source and target version.
+So we need to update the models metadata before launching
+the update of all modules to avoid build failures or loss of data.
+
+_Implementation_: [songs/migration/pre.py](../odoo/songs/migration/pre.py)
+in function `update_moved_models`.
+
+### Update moved fields
+
+For the fields moved in another module between source and target version,
+you must update the fields metadata before launching
+the update of all modules to avoid build failures or loss of data.
+
+_Implementation_: [songs/migration/pre.py](../odoo/songs/migration/pre.py)
+in function `update_moved_fields`.
+
+### Install/Update all modules
+
+In the migration build, all modules we want to:
+
+* install,
+* keep installed,
+* update
+
+must be listed in [migration.yml](../odoo/migration.yml)
+in section `addons/upgrade`.
+
+At the end of the build,
+be sure that the list of installed modules is the same
+for a migrated database (with marabunta mode `migration`)
+as for a « from scratch » database (with marabunta mode `sample`).
+
+### Uninstall modules
+
+In target version,
+if we don't want to keep some modules previously installed in source version,
+we must uninstall them to allow Odoo to remove all their metadata/data. 
+ +The source code for these modules is not required to uninstall them. + +_Implementation_: [songs/migration/post.py](../odoo/songs/migration/post.py) +in function `uninstall_modules`. + +### Clean the database + +In migration process, a lot of metadata/data persists in the database +even if the origin module has been uninstalled. + +The following data can be cleaned: + +* models +* columns/fields +* tables +* models data +* menus + +See module `database_cleanup` to see what is exactly cleaned for each item. + +_Implementation_: [songs/migration/post.py](../odoo/songs/migration/post.py) +in function `database_cleanup`. + +### Clean unavailable modules + +A lot of available modules not installed in source version +are still in the modules list of the target version, +but are now unavailable (because not updated for the new version). + +These modules must be deleted from the list of modules. + +_Implementation_: [songs/migration/post.py](../odoo/songs/migration/post.py) +in function `clean_unavailable_modules`. + +## Tools/Scripts to help the developer + +### Check fields + +In migration build, +if a field is moved from a module to another, +the column/data can be lost during the process. + +A script is available to check if the migrated database contains fields +which must be moved into another module. + +This script will help you to know which fields you must move +to be sure to not lose data during the process +(you will add these fields in the step [Update moved fields](#update-moved-fields)). + +To launch the script, it's necessary to: + +* be in `dev` environment +* launch the build with environment variable: `MIGRATION_CHECK_FIELDS` + +:warning: **Be careful**, this script is here to help the developer, +but to be sure that no data has been lost, the best way is to test the migration. 
+ +_Implementation_: + +* [songs/migration/pre_check_fields.py](../odoo/songs/migration/pre_check_fields.py) +* [songs/migration/post_check_fields.py](../odoo/songs/migration/post_check_fields.py) + +## Requirements/dependencies + +* Uncomment the `openupgradelib` import in [requirement.txt](../odoo/requirement.txt) +* Install the module `database_cleanup` in [migration.yml](../odoo/migration.yml) + * Repository OCA for this module is `server-tools` diff --git a/docs/how-to-test-smtp.md b/docs/how-to-test-smtp.md new file mode 100644 index 0000000..6750b91 --- /dev/null +++ b/docs/how-to-test-smtp.md @@ -0,0 +1,28 @@ +# How to test SMTP + +## MailHog + +In the development environment, a [MailHog](https://github.com/mailhog/MailHog) container is started alongside odoo. + +This service is published by default on http://localhost:8025 + +The default server environment file configuration for `dev` is configured to use this MailHog container as SMTP server. As soon as Odoo sends an email, it will be trapped in the MailHog server and can be viewed on http://localhost:8025 + +## Add setup on an existing project + +If the container does not exist on this project because it's older than this, you'll need to: + +* Add the container in docker-compose.yml +* Add the `server_environment_file` config in the `dev` profile + +Using this commit as reference: https://github.com/guewen/odoo-template/commit/0645e82ac104c444015ba54fbe9706dce4c4d2bf + +## MailTrap + +In integration and test environment, [mailtrap.io](https://mailtrap.io) is used. + +To create a new account for a project, use the email address that can be found on the specific project page (tab "Other Info") of our Odoo instance and a random password. + +The confirmation email of mailtrap.io will appear in the issues of the corresponding project. + +Don't forget to add an entry in lastpass. 
diff --git a/docs/how-to-use-a-prod-db-in-dev.md b/docs/how-to-use-a-prod-db-in-dev.md new file mode 100644 index 0000000..33615d5 --- /dev/null +++ b/docs/how-to-use-a-prod-db-in-dev.md @@ -0,0 +1,71 @@ +# How to use a PROD DB in DEV mode + +## Get a dump locally + +If you already have a local dump on your computer, you can skip this section. +If the project is not hosted on our cloud-platform, you should ask a dump to the support team or the project manager. + +### From odoo-dump-bag + +Connect to https://dump-bag.odoo.camptocamp.ch and retrieve the dump using `aws`. +Then skip to the next section. + +### Generate a dump + +This is to be used only in case you can't get a dump from odoo-dump-bag + +1. Connect to production replication server on odoo-platform-none-db-replication + + Make sure you're in C2C VPN and open a terminal. + + ``` + odoo-platform-none-db-replication 5555 + ``` + + This will create a connection to DB server on localhost:5555 + + * Should you need a dump from Integration, use : `odoo-platform-none-int-db 5555` + * Should you need a dump from Production, use : `odoo-platform-none-db 5555` + +2. Create and download the dump + + Open a second terminal. + + ``` + pg_dump --format=c -h localhost -p 5555 -U old_dream_6085 old_dream_6085 -O --file /path/to/backups/geo_11-$(date +%Y-%m-%d).pg + ``` + + Replace DB NAME and user with the names of your project DB. + +## Restore and use the production dump + +Now that you have a production DB dump on your computer, you want to load it so you can start a Docker composition. + +1. Create a new database : + + ``` + docker-compose run --rm odoo createdb -O odoo prod + ``` + + Should you already have a DB called prod you can either use another name or drop it using : + + ``` + docker-compose run --rm odoo dropdb prod + ``` + +2. Load the production dump : + + ``` + docker-compose run --rm odoo pg_restore -p 5432 -d prod < /path/to/backups/geo_11-$(date +%Y-%m-%d).pg + ``` + +3. 
Now you can start Odoo setting DB_NAME to prod: + + ``` + docker-compose run --rm -e DB_NAME=prod -p 8069:8069 odoo odoo --workers=0 + ``` + +## Notes + +* [How to work with several databases](./docker-and-databases.md#working-with-several-databases) +* [How to backup and restore a dump](./docker-and-databases.md#backup-and-restore-with-dumps) diff --git a/docs/invoke.md b/docs/invoke.md new file mode 100644 index 0000000..88190d7 --- /dev/null +++ b/docs/invoke.md @@ -0,0 +1,137 @@ +# Using automated tasks with Invoke + +This project uses `invoke` to run some automated tasks. + +First, install it with: + +```bash + +$ pip install -r tasks/requirements.txt + +``` + +You can now see the list of tasks by running at the root directory: + +```bash + +$ invoke --list + +``` + +The tasks are defined in `tasks.py`. + +## Some tasks + +### release.bump + +release.bump is used to bump the current version number: +(see [releases.md](./releases.md#versioning-pattern) for more information about versioning) + +``` +invoke release.bump --feature +# or +invoke release.bump --patch +``` + +--feature will change the minor version number (eg. 9.1.0 to 9.2.0). +--patch will change the patch version number (eg 9.1.0 to 9.1.1). + +release.bump changes the following files (which must be committed): + * [odoo/VERSION](../odoo/VERSION): just contains the project version number, so this version is changed. + * [HISTORY.rst](../HISTORY.rst): Rename Unreleased section with the new version number and create a new unreleased section. + * rancher/integration/docker-compose.yml: Change the version of the docker image to use for the integration stack. + +### project.sync + +Copy files (such as docs) from the +[odoo-template](https://github.com/camptocamp/odoo-template). +It should be run on a regular basis. + +``` +invoke project.sync +``` + +### translate.generate + +It generates or updates the pot translation file for an addon. 
+A new database will be created by the task, in which the addon will be +installed, so we are guaranteed to have clean terms. + +``` +invoke translate.generate odoo/local-src/my_addon +# or +invoke translate.generate odoo/external-src/sale-workflow/my_addon + +``` + +### submodule.init + +Add git submodules from the `.gitmodules` file configuration. +Instead of using `git submodule add -b 11.0 {url}` +{path}, for every branch you need to add, you can edit the `.gitmodules` file, +add the entries you want, and run this command. + + +``` +invoke submodule.init +``` + +### submodule.list + +List submodule paths which can be used to directly copy-paste the +addons paths in the Dockerfile. The order depends on the order in the +.gitmodules file. + +``` +invoke submodule.list +``` + +### submodule.merges + +Generate and push a branch including the pending pull requests. + +``` +invoke submodule.merges odoo/external-src/sale-workflow +``` + + +### submodule.show-closed-prs + +Show a list of closed pull requests in the pending merges. + +``` +invoke submodule.show-closed-prs +``` + +### songs.rip + +Copy generated songs of a dj.compilation. +They come as a zip file which can be loaded from a local path or from an Odoo URL. +Files will be placed according to their path in the zip file with ./odoo as the root. + +When providing a URL you can set authentication parameters for the Odoo from which +you want to download the compilation. + +Usually songs and csv data will be copied in: + +odoo/songs/install/generated +and +odoo/data/install/generated + +See https://github.com/camptocamp/odoo-dj for more details about dj.compilation + + +``` +invoke songs.rip http://127.0.0.1:8888/dj/download/compilation/account-default-1 [--login admin] [--password admin] [--db odoodb] +# or +invoke songs.rip /tmp/songs.zip +``` + +## Custom tasks + +Alongside the core namespaces (release, project, translate), you can create +your own namespace for the needs of the project. 
Dropping a Python file with an +invoke `@task` in the `tasks` directory will make it available in the `invoke` +commands. The namespace will be the name of the file. Let's say you add a +function named `world` decorated with `@task` in a file named `hello.py`, then +the command will be `invoke hello.world`. diff --git a/docs/odoo-platform-deployment.md b/docs/odoo-platform-deployment.md new file mode 100644 index 0000000..ad5bbad --- /dev/null +++ b/docs/odoo-platform-deployment.md @@ -0,0 +1,2 @@ +Platform documentation is on +https://confluence.camptocamp.com/confluence/display/BS/Odoo+Cloud+Platform+-+Technical diff --git a/docs/odoo-test-platform.md b/docs/odoo-test-platform.md new file mode 100644 index 0000000..8195faf --- /dev/null +++ b/docs/odoo-test-platform.md @@ -0,0 +1,2 @@ +Test Platform documentation is on +https://confluence.camptocamp.com/confluence/display/BS/Odoo+Test+Platform+-+Minions+-+Technical diff --git a/docs/prerequisites.md b/docs/prerequisites.md new file mode 100644 index 0000000..51c255d --- /dev/null +++ b/docs/prerequisites.md @@ -0,0 +1,17 @@ +# Prerequisite + +Be sure to have a recent version of docker and docker-compose. + +1. to install docker, refers to [this + documentation](https://docs.docker.com/engine/installation/linux/ubuntulinux/), + or kindly ask to someone who will be glad to help you :) + Also do the optional configuration "Create a Docker group". If it does not + work, try this command: + + sudo gpasswd -a ${USER} docker + + and restart the Docker daemon. + +2. to install docker-compose, run + + sudo pip install docker-compose diff --git a/docs/pull-requests-quality-check.md b/docs/pull-requests-quality-check.md new file mode 100644 index 0000000..2034f50 --- /dev/null +++ b/docs/pull-requests-quality-check.md @@ -0,0 +1,99 @@ +# Quality checks on pull requests + +## Performed checks + +You can put a **#trivial** marker in a pull request body to mark it as something +simple and straightforward. 
+ +### Warnings (won't cause the build to fail) + +- PR headline contains **[WIP]** marker +- PR has a **work in progress** label on it +- Large diff (500+ lines) +- `HISTORY.rst` hasn't changed in a pull request (unless your PR is "trivial") +- you've made changes to a `.py` files in a local module, but didn't stated it + in a last migration step `odoo/migration.yml` +- `.py` files were changed, but not tests *(pretty much naive)* +- last migration version in `migration.yml` targets released version and upgrade + is advised (`.py` files were updated in a module that is absent in a last + migration step - this is a warning since it's not a mandatory to upgrade a + module after any `.py` update) + +### Errors (would cause the build to fail) + +- you've made changes to a `.xml` files in a local module, but didn't stated it + in a last migration step `odoo/migration.yml` - behaves pretty much like the + previous check, but generates an error instead. +- last migration version in `migration.yml` targets released version and upgrade + is required (`.xml` files were updated in a module that is absent in a last + migration step) + +Commit message is undergoing a "lint check" as well as the diff: +- message has to consist of more than a single word +- message has to start from a Capital Letter +- message shouldn't have a period at the end +- headline has to be separated from the body with a blank line + +### Roadmap + +- [ ] Handle mismatched remotes on submodules with pending merges + + Currently, Dangerfile features a fancy check for mismatched remotes in a + submodules with pending merges in 'em in order to be able to see which of + those an author should alter w/o having to dig into Travis build logs + (possibly multiple times, since Travis reveals one broken remote at a time). 
+ Though, I've never had a chance to actually observe the outcome on a live PR, + since current submodule remote check is executed slightly before Danger is + meant to be run and, as was stated before, is quite paranoid and fails badly + in the case that Danger should handle as well - so we'll need to teach those + guys how to play well w/ each other. + +- [ ] Enhance `migration.yml` check for `.py` files + + I.e., it's safe to say that if `.py` diff contains something like + ```diff + - something = fields.Type(whatever) + + something = fields.Type(altered whatever) + ``` + then a module that holds a modified file should be present in a last migration. + Currently, a facilities to get a last migration step's upgrade lists and/or + extracting module name from a modified file name have been implemented (though + it might be better to have them extracted to functions) + +- [ ] Teach last migration step getter to feel itself comfortable in various `migration.yml` structures + + Currently, it's designed to work in a structure similar to this: + ```yaml + migration: + versions: + v1: + addons: + - ... + v1: + addons: + - ... + ``` + Imagine having different upgrade lists for different Marabunta modes, like: + ```yaml + migration: + versions: + v1: + modes: + full: + addons: + - ... + sample: + addons: + - ... + ``` + and so on. It'll work in most cases, but won't be able to get through + additional `modes/` wraps at the moment. + Preventive countermeasures were made, so Danger should operate more or less + fine, except for `migration.yml` check itself in such cases. + +- [ ] Track changes in updated submodules + + We can bug people to upgrade previously installed modules with changes in the + upstream. 
+ +- [ ] Ensure that every module mentioned in migration steps is present in `setup` step diff --git a/docs/pull-requests.md b/docs/pull-requests.md new file mode 100644 index 0000000..e4a63bc --- /dev/null +++ b/docs/pull-requests.md @@ -0,0 +1,21 @@ +# Pull requests and reviews + +Pull requests for new features are always proposed on `master`. + +There are two exceptions for this rule: + +* [patches for production releases](./releases.md#Versioning pattern) are created on dedicated branches using the naming scheme `patch-x.y.z`. +* large developments that must be developed aside the main branches during some time or branches that need consolidation before being merged in `master` may be done in separate `feature-xxx` branches. + +In both cases, you should ask to the [Release Master](./releases.md#Release master) to create the required branch. + +The pull requests must conform to the following points to be merged: + +* The Travis build must be green + * pep8 is checked + * tests of local addons pass +* It contains tests +* The changelog is updated (one or a few lines added in the + `unreleased` section of [HISTORY.rst](../HISTORY.rst)) +* The [upgrade scripts](./upgrade-scripts.md) have been updated +* PR [automated quality checks](./pull-requests-quality-check.md) are passing diff --git a/docs/releases.md b/docs/releases.md new file mode 100644 index 0000000..3cea640 --- /dev/null +++ b/docs/releases.md @@ -0,0 +1,166 @@ +# Releases + +## Release process + +In the following order, at the end of a sprint, the release manager will: + +* Merge all pending pull requests when possible, and for each corresponding card in Jira set the "Fix Version" field accordingly as well as change the status to "Waiting deploy" + +* Ensure that the migration scripts are complete and working (see [upgrade-scripts.md](upgrade-scripts.md#run-a-version-upgrade-again) on how to execute a specific version scripts). 
+ +* For projects already used in Production, ensure migration is working in FULL mode. (see [how-to-use-a-prod-db-in-dev.md](how-to-use-a-prod-db-in-dev.md) to get a dump for projects hosted on cloud platform). + +* Increase the version number (see [invoke.md](invoke.md#releasebump) for more information) + + ```bash + invoke release.bump --feature # increment y number in x.y.z + # or --patch to increment z number in x.y.z + ``` + +* The "bump" command also pushes the pending-merge branches to a new branch named after the tag (`pending-merge--`), if needed, this push can be manually called again with + + ```bash + invoke release.push-branches + ``` + +* Do the verifications: migration scripts, [changelog](../HISTORY.rst) (remove empty sections, ...) + +* Commit the changes to [changelog](../HISTORY.rst), VERSION, ... on master with message 'Release x.y.z' + +* Add a tag with the new version number, copying the changelog information in the tag description + + ``` + git tag -a x.y.z # here, copy the changelog in the annotated tag + git push --tags && git push + ``` + +When the tag is pushed on GitHub, Travis will build a new Docker image (as +long as the build is green!) and push it on the registry as `camptocamp/geo_11_odoo:x.y.z` + +If everything went well it is worth informing the project manager that a new release is ready to be tested on the Minions. + +## Versioning pattern + +The version is in the form `x.y.z` where: + +* **x** is the major number, always equal to the Odoo version (9.x.z) +* **y** is the minor number, incremented at the end of each sprint, this is + were new features are added +* **z** is the patch number, incremented for corrections on production releases. + +All the developments are done on the `master` branch and a new release on +`master` implies a new `minor` version increment. 
+When there is an issue with a released image after the tag has been set, a +patch branch is created from the tag and a new release is done from this +branch; the patch number is incremented. + +## Patch process + +### Quick checklist + +1. Create a branch `patch-x.y.next-z` based from the source tag `x.y.z` to + correct +2. Push the branch to the camptocamp repository +3. Create a working branch based on `patch-x.y.next-z` which include your fix +4. Push your working branch on your fork repository and create a pull request + on `patch-x.y.next-z` +5. Ask for reviews, get reviewed, merge +6. Create a release `x.y.next-z` +7. **Merge the branch `patch-x.y.next-z` in master (very important to have the + fix for the next release!)** + +You can refer to the more detailed documentation below. + +### Short story + +Example of branches involving Paul as the Release manager and Liza and Greg as +developers, the current version is `9.3.2`: + +* Liza works on a new feature so she creates a branch for master: + + ``` + git checkout origin/master -b impl-stock-split + git push liza/impl-stock-split + ``` + +* Greg works on a new feature too: + ``` + git checkout origin/master -b impl-crm-claim-email + git push greg/impl-crm-claim-email + ``` +* The end of sprint is close, both propose their branches as pull requests in + `master`, builds are green! +* Paul merges the pull requests, prepares a new release and when he's done, he + tags `master` with `9.4.0` +* Paul tests the image `camptocamp/geo_11_odoo:9.4.0` and oops, it seems he + goofed as the image doesn't even start +* Paul corrects the - hopefully - minor issue and prepare a new release for + `9.4.1`. 
+* Liza works on another shiny feature: + ``` + git checkout origin/master -b impl-blue-css + git push liza/impl-blue-css + ``` +* And Greg is assigned to fix a bug on the production server (now in `9.4.1`), + so he will do 2 things: + * create a patch branch **from** the production version: + ``` + git checkout 9.4.1 -b patch-claim-typo + git push greg/patch-claim-typo + ``` + * ask Paul to create a new patch branch `patch-9.4.2`, on which he will + propose his pull request +* Paul prepare a new release on the `patch-9.4.2` branch. Once released, Paul merges `patch-9.4.2` in `master`. +* At the end of the sprint, Paul prepares the next release `9.5.0` with the new Liza's feature and so on. + +### Detailed instruction + +1. Create remote patch branch + + * Figure out target version `number` in format `x.y.z` (`9.4.0`) for which + you need to make patch. It can be version released on production or + integration or specified by PM. Or maybe other case. + * Create patch branch from target with name `patch-x.y.next_z` + (`patch-9.4.1`). Where `next_z = z + 1` (`1 = 0 + 1`). + ```git + git checkout x.y.z -b patch-x.y.next_z + ``` + example + ```git + git checkout 9.4.0 -b patch-9.4.1 + ``` + * Push new empty branch to repo. + ```git + git push origin patch-x.y.next_z + ``` + example + ```git + git push origin patch-9.4.1 + ``` + Where `origin` should be camptocamp project repo + + Alternative you can create `patch-x.y.next_z` branch on github directly. + +2. Create local patch branch + + * Create your local patch branch. + * Do required changes for patch. + * Commit your changes. + * Push your changes to your remote fork repo. + * Create PR to patch remote project branch. + ```git + camptocamp/geo_11/patch-x.y.next_z <- /geo_11/ + ``` + * Request PR review in chat to speedup merge. + * Merge after reviewers approve your changes. + +3. Create patch release + + * After merge your PR to remote patch-branch do release. + Your current branch should be `patch-x.y.next_z`. 
+ See [Release process](#release-process) section. + +4. **Merge patch-branch to master** + + * **Merge `patch-x.y.next_z` to `master` to incorporate patch changes + into master branch.** diff --git a/docs/structure.md b/docs/structure.md new file mode 100644 index 0000000..23c8a47 --- /dev/null +++ b/docs/structure.md @@ -0,0 +1,37 @@ +# Structure + +At the root level, there are mainly files related to docker-compose, Travis, Rancher and the project documentation. + +When we build a Docker image, all the things below are copied *inside* the image. When developing, they are shared with volumes so we can work without having to make a new build every time we modify one file. + +The implementation of odoo is in the [subfolder odoo](../odoo) which is broken down as: + +**[Dockerfile](../odoo/Dockerfile)** + +The Dockerfile used to build the Odoo custom image for this project. +It can be customized to [install dependencies](./how-to-add-dependency.md). + +**[data/](../odoo/data)** + +Directory used to hold files that'll be used by scenario / upgrade scripts to load data in Odoo. It might be images, CSV files, ... The files concerning the installation go in `install`, the ones used for loading a sample dataset go in `sample`. + +**[songs/](../odoo/songs)** + +Contains the setup and upgrade scripts used for the setup and the migration of the databases. +It itself contains the following subdirectories: **install** which holds the installation scripts, **sample** for the steps used only when using an excerpt of the data for dev/test and **upgrade** will be used for migration steps between releases after the go-live. + +**[src/](../odoo/src)** + +This is where the Odoo (/OCA) source code is located. +That's a git submodule. + +**[external-src/](../odoo/external-src)** + +This is where external addons repositories (such as OCA's ones) are added using +git submodules. 
Each new repository must be added to the `Dockerfile` , see +[How to add a new addons repository](./docs/how-to-add-repo.md). + +**[local-src/](../odoo/local-src)** + +Customization addons local to this customer are stored here, directly in this +git repository. diff --git a/docs/upgrade-scripts.md b/docs/upgrade-scripts.md new file mode 100644 index 0000000..325d4e3 --- /dev/null +++ b/docs/upgrade-scripts.md @@ -0,0 +1,245 @@ +# Upgrade scripts + +## Stack + +The upgrade system uses the following tools: + +* [Marabunta](https://github.com/camptocamp/marabunta) +* [Anthem](https://github.com/camptocamp/anthem) + +Marabunta is an orchestrator for running the migrations. Anthem runs Python +scripts provided with an Odoo environment. More details below. + +Those tools are integrated in the image and can be configured using environment +variables. See the [Docker Odoo Project +readme](https://github.com/camptocamp/docker-odoo-project/#environment-variables) +for details on the variables. + +## Marabunta + +Marabunta expects a file named `migration.yml` in the `odoo` directory. +This `yaml` file contains the upgrade instructions for every version. +It creates a `marabunta_version` table in the database, containing the +installed versions, and run the upgrade scripts when a version is missing. + +The upgrade instructions are composed of: + * A list of commands executed before the installation/upgrade of the addons + * The installation/upgrade of a list of addons + * A list of commands executed after the installation/upgrade of the addons + +Further, we can configure *modes* which allow to run additional commands when a +mode is activated (e.g. the 'sample' mode would load some sample data). + +See [an example of yaml +file](https://github.com/camptocamp/marabunta/blob/master/marabunta/parser.py#L14-L61) + +## Anthem + +Anthem is a replacement for `oerpscenario`, but the scripts are written with +straight Python. 
+ +Simple example: + +```python + +import anthem +from anthem.lyrics.records import create_or_update + + +@anthem.log +def setup_company(ctx): + """ Configuring company """ + company = ctx.env.ref('base.main_company') + company.name = 'Rainbow Holding' + + +@anthem.log +def create_partners(ctx): + """ Creating partners """ + names = [('Khiank Mountaingut', '__scenario.partner_1'), + ('Kher Fernthorn', '__scenario.partner_2'), + ('Sheing Coaldigger' '__scenario.partner_3'), + ] + for name, xmlid in names: + create_or_update('res.partner', xmlid, {'name': name}) + + +def main(ctx): + setup_company(ctx) + create_partners(ctx) + +``` + +As you can see, each function takes a `ctx` argument. The `ctx` contains an +Odoo environment (`env`) which can be used exactly as we would do in Odoo. + +Also, Anthem includes predefined functions for the common tasks (load csv +files, upsert a record with a xmlid, ...), called `lyrics`. + +## Use cases + +### Calling a song in migration.yml + +Anthem takes the function we want to run as argument. +The syntax of the argument is `module-path::function-name`, where *module-path* +is the Python path of the module. So let's say we have a `main` function in +`songs/install/post.py`, the argument would be: `songs.install.post::main`. + +In the `migration.yml` file, it means we have to add a pre/post operation in +the current version: + +```yaml + - version: 11.0.0 + operations: + post: + - anthem songs.install.post::main +``` + +### Run a single Anthem's song + +As demonstrated in the previous section, anthem takes the function we want to +run as argument. So we can run a container the command line to execute the +desired function: + +``` +$ docker-compose run --rm odoo anthem songs.sample.data_sample::create_partners +``` + +### Run a version upgrade again + +By default, Marabunta won't execute a migration upgrade twice. 
+You can force it to execute an upgrade again with the `MARABUNTA_FORCE_VERSION` +environment variable: + +``` +$ docker-compose run --rm -e MARABUNTA_FORCE_VERSION=11.0.0 odoo +``` + +With the command above, odoo will be run at the end of the migration. +You could also run only `migrate` which will exit the container once done. + +``` +$ docker-compose run --rm -e MARABUNTA_FORCE_VERSION=11.0.0 odoo migrate +``` + +### Execute the upgrade for a given mode + +Modes allow to run additional commands. + +```yaml + versions: + - version: 11.0.0 + operations: + pre: + - anthem songs.install.pre::main + post: + - anthem songs.install.post::main + addons: + upgrade: + - account + modes: + full: + operations: + pre: + - anthem songs.install.pre_full::main + post: + - anthem songs.install.data_full::main + sample: + operations: + post: + - anthem songs.sample.data_sample::main + addons: + upgrade: + - dev_addon +``` + +In the example above, we have 2 modes: `full` and `sample`. We use them to spawn +different types of instances, `sample` might be used for a development instance +or a test server, the full one for the production (possibly with a lot of +data). + +Mode's commands are always executed after the main ones, the addons lists are merged. +The order of execution when no mode is used will be: + +1. `anthem songs.install.pre::main` +2. `-i account` (or `-u` if it is already installed) +3. `anthem songs.install.post::main` + +When the sample mode is used (`MARABUNTA_MODE=sample`): + +1. `anthem songs.install.pre::main` +2. `-i account,dev_addon` (or `-u` if it is already installed) +3. `anthem songs.install.post::main` +4. `anthem songs.sample.data_sample::main` + +When the full mode is used (`MARABUNTA_MODE=full`): + +1. `anthem songs.install.pre::main` +1. `anthem songs.install.pre_full::main` +2. `-i account` (or `-u` if it is already installed) +3. `anthem songs.install.post::main` +4. 
`anthem songs.install.data_full::main` + +Usually, the `MARABUNTA_MODE` will be set in the `docker-compose.yml` +composition files, but you can also set in when running a container: + +``` +$ docker-compose run --rm -e MARABUNTA_MODE=sample odoo +``` + +### Disable the migration + +When you don't want the migration to run at all, you can disable it with: + +``` +$ docker-compose run --rm -e MIGRATE=False odoo +``` + +### Upgrade all modules + +If you upgrade `odoo/src` and any other `odoo/external-src/*` repos, +you might want to update all the installed modules. +You should just declare `base` in the addons section, like this: + +```yaml + versions: + - version: 11.0.1 + addons: + upgrade: + - base +``` + +### Load heavy files + +If you have to import huge files (eg: stock.location) +you should delegate import to `importer.sh`. + +```python +@anthem.log +def setup_locations(ctx): + deferred_import( + ctx, + 'stock.location', + 'data/install/stock.location.csv', + defer_parent_computation=True) +[...] +@anthem.log +def location_compute_parents(ctx): + deferred_compute_parents(ctx, 'stock.location') +``` + +```yaml +modes: + full: + operations: + post: + - anthem songs.install.data_full::main + #### import heavy stuff + - importer.sh songs.install.inventory::setup_locations /odoo/data/install/stock.location.csv + - anthem songs.install.inventory::location_compute_parents +``` + +#### WARNING + +If you want to import records w/ many parent/children relations (like product categories) it might fail. +The import is done in parallel so is not granted that you'll have parents imported before children. 
ATM ;) diff --git a/odoo/.dockerignore b/odoo/.dockerignore new file mode 100644 index 0000000..8877048 --- /dev/null +++ b/odoo/.dockerignore @@ -0,0 +1,11 @@ +**/.git/**/* +**/.git +**/.gitignore +**/.travis.yml +**/LICENSE +**/*.pyc +**/README.md +**/i18n/*.po +# installable lang +!**/i18n/de.po +!**/i18n/fr.po diff --git a/odoo/Dockerfile b/odoo/Dockerfile new file mode 100644 index 0000000..8aa3cb6 --- /dev/null +++ b/odoo/Dockerfile @@ -0,0 +1,76 @@ +FROM camptocamp/odoo-project:11.0-3.0.0 +MAINTAINER Camptocamp + +# Steps are grouped by dependency +# and ordered by compromizing cost and frequency + +## Base requirements (Libs and packages) +# frequency: rarely +# cost: high + +COPY ./*requirements.txt /odoo/ +# Install additional debian and python packages if needed +RUN set -x; \ + apt-get update \ + && apt-get install -y --no-install-recommends \ + # parallel libmagic1 + # if you need some dev packages for python packages, you need to clean them afterwards + python3-dev build-essential \ + && cd /odoo \ + && find . -maxdepth 1 -name "*requirements.txt" ! -name src_requirements.txt ! 
-name base_requirements.txt | xargs -I{} pip install -r {} \ + # cleaning of dev packages + && apt-get remove -y build-essential python-dev \ + && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false -o APT::AutoRemove::SuggestsImportant=false \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + + +## Tools of odoo-template +# frequency: seldom +# cost: light + +# Entrypoints +COPY ./before-migrate-entrypoint.d/* /before-migrate-entrypoint.d/ +COPY ./start-entrypoint.d/* /start-entrypoint.d/ +RUN chmod +x /before-migrate-entrypoint.d/* \ + && chmod +x /start-entrypoint.d/* +# CSV Loader +# `parallel` + `importer.sh` are needed to load heavy files +COPY ./bin/importer.sh /odoo-bin/ + +## Prepare pip install +# frequency: never +# cost : very light +COPY ./setup.py /odoo/ + +## Main activity +# frequency: always +# costs: very high to very light +COPY ./src /odoo/src +COPY ./external-src /odoo/external-src +COPY ./local-src /odoo/local-src +COPY ./data /odoo/data +COPY ./songs /odoo/songs +COPY ./VERSION /odoo/ +COPY ./migration.yml /odoo/ + +# depends on ./setup.py ./src, ./songs and ./VERSION +RUN cd /odoo && pip install -r src_requirements.txt + + +## PLATFORM: Add this line to ADDONS_PATH if hosting is on cloud platform +# /odoo/external-src/odoo-cloud-platform, \ + +## Environment +# frequency: sometimes +# cost: very light +ENV ADDONS_PATH="/odoo/external-src/enterprise, \ + /odoo/external-src/server-tools, \ + /odoo/external-src/geospatial, \ + /odoo/external-src/web, \ + /odoo/src/addons, \ + /odoo/local-src" \ + LIMIT_TIME_CPU=86400 \ + LIMIT_TIME_REAL=86400 \ + LIMIT_MEMORY_SOFT=1342177280 \ + LIMIT_MEMORY_HARD=1610612736 diff --git a/odoo/VERSION b/odoo/VERSION new file mode 100644 index 0000000..275283a --- /dev/null +++ b/odoo/VERSION @@ -0,0 +1 @@ +11.0.0 diff --git a/odoo/before-migrate-entrypoint.d/000_dev_set_db_storage b/odoo/before-migrate-entrypoint.d/000_dev_set_db_storage new file mode 100755 index 0000000..f8af473 
--- /dev/null +++ b/odoo/before-migrate-entrypoint.d/000_dev_set_db_storage @@ -0,0 +1,57 @@ +#!/bin/bash +# +# Often in development mode, we retrieve a database from a production server +# using 'attachment_s3' (cloud platform). This script automatically changes +# the attachments to be stored in the database because there is no S3 bucket +# when working in dev. +# +# When one needs to work on S3 with a development instance, he might set the +# ODOO_DEV_KEEP_ATTACHMENT_S3=true environment variable +# +keep_location=$(echo "${ODOO_DEV_KEEP_ATTACHMENT_S3}" | tr '[:upper:]' '[:lower:]' ) + +if [ "$keep_location" = "true" ]; then + exit 0 +fi + +if [ "$RUNNING_ENV" = "dev" ] ; then + + if [ "$( psql -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" )" != '1' ] + then + echo "Database does not exist, ignoring script" + exit 0 + fi + + + echo "Deleting web attachment when ir_attachment.location parameter is 's3' or 'swift'" + + psql << EOF + + DELETE FROM ir_attachment + WHERE url like '/web/content/%' + AND EXISTS ( + SELECT value + FROM ir_config_parameter + WHERE key = 'ir_attachment.location' + AND value in ('s3', 'swift') + ); + +EOF + + echo "Setting ir_attachment.location parameter to 'db' if the current is 's3' or 'swift'" + + psql << EOF + + UPDATE ir_config_parameter + SET value = 'db' + WHERE key = 'ir_attachment.location' + AND EXISTS ( + SELECT value + FROM ir_config_parameter + WHERE key = 'ir_attachment.location' + AND value in ('s3', 'swift') + ); + +EOF + +fi diff --git a/odoo/before-migrate-entrypoint.d/001_dev_reset_users_pwd b/odoo/before-migrate-entrypoint.d/001_dev_reset_users_pwd new file mode 100644 index 0000000..040a1e7 --- /dev/null +++ b/odoo/before-migrate-entrypoint.d/001_dev_reset_users_pwd @@ -0,0 +1,24 @@ +#!/bin/bash + +# +# Often in development mode, we retrieve a database from a production server +# and each users have their own password. +# Here we force all of them equal to usernames. 
+ +if [ "$RUNNING_ENV" = "dev" ] ; then + + if [ "$( psql -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" )" != '1' ] + then + echo "Database does not exist, ignoring script" + exit 0 + fi + + echo "Setting users' passwords to their usernames." + + psql << EOF + + UPDATE res_users SET password = login; + +EOF + +fi diff --git a/odoo/before-migrate-entrypoint.d/900_deprecate_demo_to_sample b/odoo/before-migrate-entrypoint.d/900_deprecate_demo_to_sample new file mode 100644 index 0000000..bf01633 --- /dev/null +++ b/odoo/before-migrate-entrypoint.d/900_deprecate_demo_to_sample @@ -0,0 +1,16 @@ +#!/bin/bash + +# +# Display a warning and action to do to deprecate the +# MARABUNTA_MODE demo +# The rationale is that marabunta demo is confusing with +# odoo demo +# Here we warn only the developper, no need to spam logs +# in other environments + +if [ "$RUNNING_ENV" == "dev" -a "$MARABUNTA_MODE" == "demo" ] ; then + + echo -e "\033[0:33mDEPRECATED\033[0m: MARABUTA_MODE option 'demo' is deprecated to be replaced by 'sample'." + echo -e " Please run \033[0:37minvoke deprecate.demo-to-sample\033[0m to autofix" + +fi diff --git a/odoo/before-migrate-entrypoint.d/901_migrate_marabunta_version b/odoo/before-migrate-entrypoint.d/901_migrate_marabunta_version new file mode 100755 index 0000000..f995ac1 --- /dev/null +++ b/odoo/before-migrate-entrypoint.d/901_migrate_marabunta_version @@ -0,0 +1,41 @@ +#!/bin/bash +# This file has been generated with 'invoke project.sync'. +# Do not modify. Any manual change will be lost. +# Please propose your modification on +# https://github.com/camptocamp/odoo-template instead. + + +# Marabunta now supports 5 digits versions and the 1st initial version must be "setup" + +if [ "$( psql -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" )" != '1' ] +then + echo "Database does not exist, ignoring script" + exit 0 +fi + +echo "Checking 1st marabunta version." 
+ +if [ "$( psql -tAc "SELECT 1 FROM marabunta_version WHERE number='setup'" )" != '1' ] +then + + echo "Setting 1st marabunta version to 'setup'" + + # inject "setup" version before the 1st one + psql << EOF + + INSERT INTO marabunta_version ( + number, + date_start, + date_done, + log + ) + VALUES ( + 'setup', + current_timestamp, + current_timestamp, + 'INSERTED by before-migrate-entrypoint.d/901_migrate_marabunta_version' + ) + +EOF + +fi diff --git a/odoo/bin/importer.sh b/odoo/bin/importer.sh new file mode 100644 index 0000000..4e92d9b --- /dev/null +++ b/odoo/bin/importer.sh @@ -0,0 +1,56 @@ +#!/bin/sh +# This script can be use to import big csv file +# He splits the csv file and run multiple anthem (one by processor) +# WARNING: You can't use it if the imported model as foreign key on itself + +# Usage: importer.sh anthem_command csv_file_path +set -e + +PROC=`nproc --all` + +ANTHEM_ARG=$1 +if [ -z $1 ]; then + echo "Please provide a file path and the anthem arg" + exit 1; +fi + + +DATA_PATH=$2 +if [ -z $2 ] || [ ! -e $DATA_PATH ]; then + echo "Unable to find the data file $2"; + exit 1; +fi; + +IMPORTER_DIR=/var/tmp/importer +SPLIT_DIR=${IMPORTER_DIR}/`basename $DATA_PATH`_split +mkdir -p $SPLIT_DIR + +cd ${IMPORTER_DIR} + +# Split the csv file in $PROC files +CSV_HEADER=$(sed 1q "$DATA_PATH") +split -d -n l/$PROC ${DATA_PATH} ${SPLIT_DIR}/ +# Add missing CSV header +for file in `find ${SPLIT_DIR} -type f`; do + # Split in smaller chunks of 500 maximum + SUB_SPLIT_DIR=${file}_split + mkdir $SUB_SPLIT_DIR + split -d -l 500 ${file} ${SUB_SPLIT_DIR}/ + rm $file + for sub_file in `find ${file}_split -type f`; do + if [ $CSV_HEADER != "`sed 1q $sub_file`" ]; then + sed -i "1i\\$CSV_HEADER" $sub_file; + fi + done +done + +# Import splitted files with parallel +START_TIME=$(date +%s) +ls -1 $SPLIT_DIR | parallel -j $PROC DATA_DIR=${SPLIT_DIR}/{} anthem $ANTHEM_ARG --no-xmlrpc + +echo "Parallel total loading data: $(($(date +%s) - $START_TIME))s" + + +if [ $? 
-eq 0 ]; then + rm ${IMPORTER_DIR} -r +fi diff --git a/odoo/data/images/README.md b/odoo/data/images/README.md new file mode 100644 index 0000000..6f0c755 --- /dev/null +++ b/odoo/data/images/README.md @@ -0,0 +1 @@ +Images for the songs diff --git a/odoo/data/install/README.md b/odoo/data/install/README.md new file mode 100644 index 0000000..fc88043 --- /dev/null +++ b/odoo/data/install/README.md @@ -0,0 +1 @@ +Data files for the installation songs diff --git a/odoo/data/sample/README.md b/odoo/data/sample/README.md new file mode 100644 index 0000000..1bfbfc2 --- /dev/null +++ b/odoo/data/sample/README.md @@ -0,0 +1 @@ +Demo files for the songs diff --git a/odoo/data/sample/customers.csv b/odoo/data/sample/customers.csv new file mode 100644 index 0000000..00ec6f7 --- /dev/null +++ b/odoo/data/sample/customers.csv @@ -0,0 +1,2 @@ +id,name,street,zip,city +__setup__.customer_sample_0001,Berilac Sackville,59 Maidstone Road,HA0 4NU,WEMBLEY diff --git a/odoo/external-src/.gitkeep b/odoo/external-src/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/odoo/external-src/geospatial b/odoo/external-src/geospatial new file mode 160000 index 0000000..600ff40 --- /dev/null +++ b/odoo/external-src/geospatial @@ -0,0 +1 @@ +Subproject commit 600ff402e3246d52f932bcaf924ffed3893d7f51 diff --git a/odoo/external-src/server-tools b/odoo/external-src/server-tools new file mode 160000 index 0000000..829d2e6 --- /dev/null +++ b/odoo/external-src/server-tools @@ -0,0 +1 @@ +Subproject commit 829d2e627b83b54100d929f25d50142e76bf91e4 diff --git a/odoo/external-src/web b/odoo/external-src/web new file mode 160000 index 0000000..bdaaa6a --- /dev/null +++ b/odoo/external-src/web @@ -0,0 +1 @@ +Subproject commit bdaaa6a3de2c1f41e941f738a0143b7947b0b217 diff --git a/odoo/local-src/camptocamp_tools/__init__.py b/odoo/local-src/camptocamp_tools/__init__.py new file mode 100644 index 0000000..91c5580 --- /dev/null +++ b/odoo/local-src/camptocamp_tools/__init__.py @@ -0,0 +1,2 @@ 
+from . import controllers +from . import models diff --git a/odoo/local-src/camptocamp_tools/__manifest__.py b/odoo/local-src/camptocamp_tools/__manifest__.py new file mode 100644 index 0000000..c5548af --- /dev/null +++ b/odoo/local-src/camptocamp_tools/__manifest__.py @@ -0,0 +1,20 @@ + +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +{'name': 'Camptocamp tools', + 'description': "Camptocamp tools and version controller.", + 'version': '11.0.1.0.0', + 'author': 'Camptocamp', + 'license': 'AGPL-3', + 'category': 'Others', + 'depends': [ + 'base', + 'web' + ], + 'website': 'http://www.camptocamp.com', + 'data': [ + 'templates/camptocamp_version_template.xml', + 'views/camptocamp_version.xml' + ], + 'installable': True, + } diff --git a/odoo/local-src/camptocamp_tools/controllers/__init__.py b/odoo/local-src/camptocamp_tools/controllers/__init__.py new file mode 100644 index 0000000..4866d37 --- /dev/null +++ b/odoo/local-src/camptocamp_tools/controllers/__init__.py @@ -0,0 +1 @@ +from . import camptocamp_version diff --git a/odoo/local-src/camptocamp_tools/controllers/camptocamp_version.py b/odoo/local-src/camptocamp_tools/controllers/camptocamp_version.py new file mode 100644 index 0000000..f45c30a --- /dev/null +++ b/odoo/local-src/camptocamp_tools/controllers/camptocamp_version.py @@ -0,0 +1,25 @@ + +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
+ +from odoo import http, _ +from odoo.http import request +from odoo.exceptions import UserError + + +class CamptocampVersionController(http.Controller): + + @http.route('/web/camptocamp/tools/versions', type='http', auth='user', + website=False) + def camptocamp_versions(self, *args, **kwargs): + if not request.env.user.has_group('base.group_no_one'): + raise UserError(_( + "Only users with Technical Features activated are allowed.")) + sql = """SELECT number, date_done + FROM marabunta_version + ORDER BY date_done DESC;""" + request.env.cr.execute(sql) + res = request.env.cr.dictfetchall() + values = {'versions': res} + return request.render('camptocamp_tools.camptocamp_versions_template', + values) diff --git a/odoo/local-src/camptocamp_tools/models/__init__.py b/odoo/local-src/camptocamp_tools/models/__init__.py new file mode 100644 index 0000000..aaf38a1 --- /dev/null +++ b/odoo/local-src/camptocamp_tools/models/__init__.py @@ -0,0 +1 @@ +from . import ir_attachment diff --git a/odoo/local-src/camptocamp_tools/models/ir_attachment.py b/odoo/local-src/camptocamp_tools/models/ir_attachment.py new file mode 100644 index 0000000..004cdb9 --- /dev/null +++ b/odoo/local-src/camptocamp_tools/models/ir_attachment.py @@ -0,0 +1,25 @@ + +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +from odoo import models, api, SUPERUSER_ID +from ..utils import install_trgm_extension, create_index + + +class IrAttachment(models.Model): + _inherit = 'ir.attachment' + + @api.model_cr + def init(self): + """ Add index on ir_attachment.url to speed up the initial request + made each time a page is (re)loaded : + `select id from ir_attachment where url like '/web/content%'` + """ + env = api.Environment(self._cr, SUPERUSER_ID, {}) + trgm_installed = install_trgm_extension(env) + self._cr.commit() + + if trgm_installed: + index_name = 'ir_attachment_url_trgm_index' + create_index(self._cr, index_name, self._table, + 'USING gin (url 
gin_trgm_ops)') diff --git a/odoo/local-src/camptocamp_tools/templates/camptocamp_version_template.xml b/odoo/local-src/camptocamp_tools/templates/camptocamp_version_template.xml new file mode 100644 index 0000000..97eae6c --- /dev/null +++ b/odoo/local-src/camptocamp_tools/templates/camptocamp_version_template.xml @@ -0,0 +1,46 @@ + + + + + + diff --git a/odoo/local-src/camptocamp_tools/utils.py b/odoo/local-src/camptocamp_tools/utils.py new file mode 100644 index 0000000..8dd51ee --- /dev/null +++ b/odoo/local-src/camptocamp_tools/utils.py @@ -0,0 +1,57 @@ + +# Copyright 2017 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import logging + +_logger = logging.getLogger(__name__) + + +def create_index(cr, index_name, table, expression): + cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', + (index_name,)) + if not cr.fetchone(): + cr.execute('CREATE INDEX %s ' + 'ON %s %s' % (index_name, table, expression)) + + +def is_postgres_superuser(env): + env.cr.execute("SHOW is_superuser;") + superuser = env.cr.fetchone() + return superuser is not None and superuser[0] == 'on' or False + + +def trgm_extension_exists(env): + env.cr.execute(""" + SELECT name, installed_version + FROM pg_available_extensions + WHERE name = 'pg_trgm' + LIMIT 1; + """) + + extension = env.cr.fetchone() + if extension is None: + return 'missing' + + if extension[1] is None: + return 'uninstalled' + + return 'installed' + + +def install_trgm_extension(env): + extension = trgm_extension_exists(env) + if extension == 'missing': + _logger.warning('To use pg_trgm you have to install the ' + 'postgres-contrib module.') + elif extension == 'uninstalled': + if is_postgres_superuser(env): + env.cr.execute("CREATE EXTENSION IF NOT EXISTS pg_trgm;") + return True + else: + _logger.warning('To use pg_trgm you have to create the ' + 'extension pg_trgm in your database or you ' + 'have to be the superuser.') + else: + return True + return False diff --git 
a/odoo/local-src/camptocamp_tools/views/camptocamp_version.xml b/odoo/local-src/camptocamp_tools/views/camptocamp_version.xml new file mode 100644 index 0000000..677b7bc --- /dev/null +++ b/odoo/local-src/camptocamp_tools/views/camptocamp_version.xml @@ -0,0 +1,10 @@ + + + + Camptocamp Versions + ir.actions.act_url + self + /web/camptocamp/tools/versions + + + diff --git a/odoo/local-src/camptocamp_website_tools/__init__.py b/odoo/local-src/camptocamp_website_tools/__init__.py new file mode 100644 index 0000000..dc3dc10 --- /dev/null +++ b/odoo/local-src/camptocamp_website_tools/__init__.py @@ -0,0 +1,2 @@ +from . import controllers +from .hooks import post_init_hook, uninstall_hook diff --git a/odoo/local-src/camptocamp_website_tools/__openerp__.py b/odoo/local-src/camptocamp_website_tools/__openerp__.py new file mode 100644 index 0000000..ae94bed --- /dev/null +++ b/odoo/local-src/camptocamp_website_tools/__openerp__.py @@ -0,0 +1,21 @@ + +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +{ + 'name': 'Camptocamp Website', + 'description': "Camptocamp improvements for Website.", + 'version': '11.0.1.0.0', + 'author': 'Camptocamp', + 'license': 'AGPL-3', + 'category': 'Others', + 'depends': [ + 'base', + 'website', + ], + 'website': 'http://www.camptocamp.com', + 'data': [], + 'installable': True, + 'auto_install': True, + 'post_init_hook': 'post_init_hook', + 'uninstall_hook': 'uninstall_hook', +} diff --git a/odoo/local-src/camptocamp_website_tools/controllers/__init__.py b/odoo/local-src/camptocamp_website_tools/controllers/__init__.py new file mode 100644 index 0000000..119c0b8 --- /dev/null +++ b/odoo/local-src/camptocamp_website_tools/controllers/__init__.py @@ -0,0 +1 @@ +from . 
import disable_website_info diff --git a/odoo/local-src/camptocamp_website_tools/controllers/disable_website_info.py b/odoo/local-src/camptocamp_website_tools/controllers/disable_website_info.py new file mode 100644 index 0000000..c794f61 --- /dev/null +++ b/odoo/local-src/camptocamp_website_tools/controllers/disable_website_info.py @@ -0,0 +1,14 @@ + +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +from odoo import http +from odoo.http import request +from odoo.addons.website.controllers.main import Website + + +class DisableWebsiteInfoController(Website): + + @http.route(auth="user") + def website_info(self): + return request.not_found() diff --git a/odoo/local-src/camptocamp_website_tools/hooks.py b/odoo/local-src/camptocamp_website_tools/hooks.py new file mode 100644 index 0000000..078a7da --- /dev/null +++ b/odoo/local-src/camptocamp_website_tools/hooks.py @@ -0,0 +1,21 @@ + +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + + +def post_init_hook(cr, registry): + query = """ + UPDATE ir_ui_view + SET customize_show = False, active = False + WHERE key = 'website.show_website_info'; + """ + cr.execute(query) + + +def uninstall_hook(cr, registry): + query = """ + UPDATE ir_ui_view + SET customize_show = True + WHERE key = 'website.show_website_info'; + """ + cr.execute(query) diff --git a/odoo/local-src/dummy_test/__init__.py b/odoo/local-src/dummy_test/__init__.py new file mode 100644 index 0000000..40a96af --- /dev/null +++ b/odoo/local-src/dummy_test/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/odoo/local-src/dummy_test/__openerp__.py b/odoo/local-src/dummy_test/__openerp__.py new file mode 100644 index 0000000..22d78c3 --- /dev/null +++ b/odoo/local-src/dummy_test/__openerp__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +{'name': 'Dummy Test', + 'description': "Dummy module to check that Travis runs tests. 
" + "To delete once we have real addons", + 'version': '1.0', + 'author': 'Camptocamp', + 'license': 'AGPL-3', + 'category': 'Others', + 'depends': ['base', + ], + 'website': 'http://www.camptocamp.com', + 'data': [], + 'installable': True, + } diff --git a/odoo/local-src/dummy_test/tests/__init__.py b/odoo/local-src/dummy_test/tests/__init__.py new file mode 100644 index 0000000..d112e78 --- /dev/null +++ b/odoo/local-src/dummy_test/tests/__init__.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- + +from . import test_dummy diff --git a/odoo/local-src/dummy_test/tests/test_dummy.py b/odoo/local-src/dummy_test/tests/test_dummy.py new file mode 100644 index 0000000..c47333f --- /dev/null +++ b/odoo/local-src/dummy_test/tests/test_dummy.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- + +import unittest + + +class TestDummy(unittest.TestCase): + + def test_dummy(self): + """ Dummy test to verify that tests run """ + self.assertTrue(True) diff --git a/odoo/local-src/server_environment_files/__init__.py b/odoo/local-src/server_environment_files/__init__.py new file mode 100644 index 0000000..40a96af --- /dev/null +++ b/odoo/local-src/server_environment_files/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/odoo/local-src/server_environment_files/__openerp__.py b/odoo/local-src/server_environment_files/__openerp__.py new file mode 100644 index 0000000..9e1a30e --- /dev/null +++ b/odoo/local-src/server_environment_files/__openerp__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# © 2016 Camptocamp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). 
+{ + 'name': 'Server Environment Files', + 'version': '11.0.1.0.0', + 'author': 'Camptocamp', + 'license': 'AGPL-3', + 'category': 'Tools', + 'website': 'http://www.camptocamp.com', + 'images': [], + 'depends': [ + 'base' + ], + 'installable': True, +} diff --git a/odoo/local-src/server_environment_files/default/base.conf b/odoo/local-src/server_environment_files/default/base.conf new file mode 100644 index 0000000..a663c0b --- /dev/null +++ b/odoo/local-src/server_environment_files/default/base.conf @@ -0,0 +1,2 @@ +[ir.config_parameter] +ribbon.name=Dev diff --git a/odoo/local-src/server_environment_files/dev/base.conf b/odoo/local-src/server_environment_files/dev/base.conf new file mode 100644 index 0000000..f217277 --- /dev/null +++ b/odoo/local-src/server_environment_files/dev/base.conf @@ -0,0 +1,4 @@ +[outgoing_mail] +smtp_host = mailhog +smtp_port = 1025 +smtp_encryption = none diff --git a/odoo/local-src/server_environment_files/integration/base.conf b/odoo/local-src/server_environment_files/integration/base.conf new file mode 100644 index 0000000..5bb1fd6 --- /dev/null +++ b/odoo/local-src/server_environment_files/integration/base.conf @@ -0,0 +1,2 @@ +[ir.config_parameter] +ribbon.name=Integration diff --git a/odoo/local-src/server_environment_files/prod/base.conf b/odoo/local-src/server_environment_files/prod/base.conf new file mode 100644 index 0000000..cf4cace --- /dev/null +++ b/odoo/local-src/server_environment_files/prod/base.conf @@ -0,0 +1,2 @@ +[ir.config_parameter] +ribbon.name=False diff --git a/odoo/local-src/server_environment_files/test/base.conf b/odoo/local-src/server_environment_files/test/base.conf new file mode 100644 index 0000000..392fa16 --- /dev/null +++ b/odoo/local-src/server_environment_files/test/base.conf @@ -0,0 +1,2 @@ +[ir.config_parameter] +ribbon.name=Test diff --git a/odoo/migration.yml b/odoo/migration.yml new file mode 100644 index 0000000..811493d --- /dev/null +++ b/odoo/migration.yml @@ -0,0 +1,72 @@ +migration: + 
options: + install_command: odoo + ## PLATFORM + # install_args: --load=web,web_kanban,session_redis,attachment_s3,logging_json # Exoscale + # install_args: --load=web,web_kanban,session_redis,attachment_swift,logging_json # OVH + versions: + - version: 11.0.0 + operations: + pre: + - anthem songs.install.pre::main + post: + ## PLATFORM + ## Choose between the two options : ovh or exoscale + #- anthem openerp.addons.cloud_platform.songs::install_exoscale + #- anthem openerp.addons.cloud_platform.songs::install_ovh + - anthem songs.install.accounting::main + - anthem songs.install.logistics::main + - anthem songs.install.data_all::main + modes: + full: + operations: + post: + - anthem songs.install.data_full::main + #### import heavy stuff + #- importer.sh songs.install.inventory::setup_locations /odoo/data/install/stock.location.csv + #- anthem songs.install.inventory::location_compute_parents + # Uncomment the "migration" mode for migration projects + # migration: + # operations: + # pre: + # - psql -f odoo/songs/migration/pre.sql + # - anthem songs.migration.pre::pre + # - anthem songs.migration.pre_check_fields::pre + # post: + # - anthem songs.migration.post::post + # - anthem songs.migration.post_check_fields::post + sample: + operations: + post: + - anthem songs.sample.data_sample::main + addons: + upgrade: + ## PLATFORM + #- cloud_platform_exoscale or cloud_platform_ovh + ## Only if v9.0 and using attachment_s3 (exoscale) and `delivery_carrier_label`: + #- delivery_carrier_label_s3 + # --- + #### oca/server-tools + - base_technical_features + # - database_cleanup # Used for project migrations + - server_environment + - server_environment_ir_config_parameter + #### oca/web + - web_environment_ribbon + #### core + - base + # - sale_stock + # - purchase + #### camptocamp/odoo-dj + - base_dj + #### local-src + # - my_addon + # update step example + # - version: 10.x.x + # addons: + # upgrade: + # - foo_module + # - baz_module + # operations: + # post: + # - 
anthem songs.updates.update_10_x_x::main diff --git a/odoo/pending-merges.yaml b/odoo/pending-merges.yaml new file mode 100644 index 0000000..c759aca --- /dev/null +++ b/odoo/pending-merges.yaml @@ -0,0 +1,25 @@ +# file used with gitaggregate: https://github.com/acsone/git-aggregator +# please see docs in docs/how-to-integrate-pull-request.md +# +# Examples: +# ./external-src/bank-payment: +# remotes: +# oca: git@github.com:OCA/bank-payment.git +# camptocamp: git@github.com:camptocamp/bank-payment.git +# merges: +# - oca 11.0 +# # add module account_invoice_bank_selection +# - oca refs/pull/236/head +# target: &default_target camptocamp merge-branch-0000-master +# ./external-src/bank-statement-reconcile: +# remotes: +# oca: git@github.com:OCA/bank-statement-reconcile.git +# camptocamp: git@github.com:camptocamp/bank-statement-reconcile.git +# merges: +# - oca 11.0 +# # port account_mass_reconcile +# - oca refs/pull/136/head +# # port of account_statement_base_import +# - oca refs/pull/137/head +# target: *default_target + diff --git a/odoo/requirements.txt b/odoo/requirements.txt new file mode 100644 index 0000000..20b3a03 --- /dev/null +++ b/odoo/requirements.txt @@ -0,0 +1,29 @@ +# base_dj +Jinja2 +autopep8 +pylint +unicodecsv +# Improve mimetype guessing. 
+# Requires `libmagic1` to be installed sys-wide +python-magic + +## PLATFORM +#redis==2.10.5 +#python-json-logger==0.1.5 +#statsd==3.2.1 + +## PLATFORM, for s3 storage at Exoscale +#boto==2.42.0 + +## PLATFORM, for Swift storage at OVH +#python-swiftclient==3.4.0 +#python-keystoneclient==3.13.0 + +# marabunta new versioning +# TODO move to docker-odoo-project as we release a new version +marabunta >= 0.9.0 + +## FOR MIGRATIONS +#openupgradelib==2.0.0 + +## custom diff --git a/odoo/setup.py b/odoo/setup.py new file mode 100644 index 0000000..80a90b6 --- /dev/null +++ b/odoo/setup.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- + +from setuptools import setup, find_packages + +with open('VERSION') as fd: + version = fd.read().strip() + +setup( + name="geo_11-odoo", + version=version, + description="geo_11 Odoo", + license='GNU Affero General Public License v3 or later (AGPLv3+)', + author="Camptocamp", + author_email="info@camptocamp.com", + url="www.camptocamp.com", + packages=['songs'] + ['songs.%s' % p for p in find_packages('./songs')], + include_package_data=True, + classifiers=[ + 'Development Status :: 4 - Beta', + 'License :: OSI Approved', + 'License :: OSI Approved :: ' + 'GNU Affero General Public License v3 or later (AGPLv3+)', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: Implementation :: CPython', + ], +) diff --git a/odoo/songs/__init__.py b/odoo/songs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/odoo/songs/common.py b/odoo/songs/common.py new file mode 100644 index 0000000..b7bb06d --- /dev/null +++ b/odoo/songs/common.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) +from builtins import str + +from pkg_resources import Requirement, resource_stream +from anthem.lyrics.loaders import load_csv_stream +from 
anthem.lyrics.records import switch_company + +import os + +req = Requirement.parse('geo_11-odoo') + + +def load_csv(ctx, path, model, delimiter=',', + header=None, header_exclude=None): + ctx.log_line( + '[DEPRECATED] use `from anthem.lyrics.loaders import load_csv. `' + '\nUpgrade `anthem` to version > 0.11.0. ' + '\nUpgrade `docker-odoo-project` image > 2.5.1 ' + 'or set `ODOO_DATA_PATH=/odoo/data` in Dockerfile.' + ) + content = resource_stream(req, path) + load_csv_stream(ctx, model, content, delimiter=delimiter, + header=header, header_exclude=header_exclude) + + +def load_users_csv(ctx, path, delimiter=','): + # make sure we don't send any email + model = ctx.env['res.users'].with_context({ + 'no_reset_password': True, + 'tracking_disable': True, + }) + load_csv(ctx, path, model, delimiter=delimiter) + + +def load_warehouses(ctx, company, path): + # in multicompany moded we must force the company + # otherwise the sequences that stock module generates automatically + # will have the wrong company assigned. + with switch_company(ctx, company) as ctx: + load_csv(ctx, path, 'stock.warehouse') + # NOTE: dirty hack here. + # We are forced to load the CSV twice because + # if you are modifying the existing base warehouse (stock.warehouse0) + # and you've changed the `code` (short name) + # the changes are not reflected on existing sequences + # until you load warehouse data again. + # We usually don't have that many WHs so... it's fine :) + load_csv(ctx, path, 'stock.warehouse') + + +def get_files(default_file): + """ Check if there is a DATA_DIR in environment else open default_file. 
+ + DATA_DIR is passed by importer.sh when importing splitted file in parallel + + Returns a generator of file to import as DATA_DIR can contain a split of + csv file + """ + try: + dir_path = os.environ['DATA_DIR'] + except KeyError: + yield resource_stream(req, default_file) + else: + file_list = os.listdir(dir_path) + for file_name in file_list: + file_path = os.path.join(dir_path, file_name) + yield open(file_path) + + +def load_csv_parallel(ctx, model, csv_path, + defer_parent_computation=True, + delimiter=','): + """Use me to load an heavy file ~2k of lines or more. + + Then calling this method as a parameter of importer.sh + + importer.sh will split the file in chunks per number of processor + and per 500. + This method will be called once per chunk in order to do the csv loading + on multiple processes. + + Usage:: + + @anthem.log + def setup_locations(ctx): + load_csv_parallel( + ctx, + 'stock.location', + 'data/install/stock.location.csv', + defer_parent_computation=True) + + Then in `migration.yml`:: + + - importer.sh songs.install.inventory::setup_locations /odoo/data/install/stock.location.csv + # if defer_parent_computation=True + - anthem songs.install.inventory::location_compute_parents + + """ # noqa + load_ctx = ctx.env.context.copy() + if defer_parent_computation: + load_ctx.update({'defer_parent_store_computation': 'manually'}) + if isinstance(model, str): + model = ctx.env[model] + model = model.with_context(**load_ctx) + for content in get_files(csv_path): + load_csv_stream(ctx, model, content, delimiter=delimiter) + + +# Deprecated name for load_csv_parallel +deferred_import = load_csv_parallel + + +def deferred_compute_parents(ctx, model): + """Use me for heavy files after calling `deferred_import`. 
+ + Usage:: + + @anthem.log + def location_compute_parents(ctx): + deferred_compute_parents(ctx, 'stock.location') + + """ + ctx.env[model]._parent_store_compute() diff --git a/odoo/songs/demo/__init__.py b/odoo/songs/demo/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/odoo/songs/install/__init__.py b/odoo/songs/install/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/odoo/songs/install/accounting.py b/odoo/songs/install/accounting.py new file mode 100644 index 0000000..2de0546 --- /dev/null +++ b/odoo/songs/install/accounting.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import anthem +from anthem.lyrics.records import create_or_update + + +@anthem.log +def activate_multicurrency(ctx): + """ Activating multi-currency """ + employee_group = ctx.env.ref('base.group_user') + employee_group.write({ + 'implied_ids': [(4, ctx.env.ref('base.group_multi_currency').id)] + }) + + +@anthem.log +def create_bank_accounts(ctx): + """ Creating bank accounts """ + expense_type = ctx.env.ref('account.data_account_type_expenses') + records = [ + {'xmlid': '__setup__.account_1010', + 'name': 'XXX 00-001285-1', + 'code': '991010', + 'user_type_id': expense_type.id, + }, + {'xmlid': '__setup__.account_1020', + 'name': 'ZZZ BE7400700115500080000', + 'code': '991020', + 'user_type_id': expense_type.id, + }, + {'xmlid': '__setup__.account_1021', + 'name': 'ZZZ BE2300700115500172222', + 'code': '991021', + 'user_type_id': expense_type.id, + }, + ] + for record in records: + xmlid = record.pop('xmlid') + create_or_update(ctx, 'account.account', xmlid, record) + + +@anthem.log +def main(ctx): + """ Configuring accounting """ + activate_multicurrency(ctx) + create_bank_accounts(ctx) diff --git a/odoo/songs/install/data_all.py b/odoo/songs/install/data_all.py new file mode 100644 index 0000000..b32b09c --- /dev/null +++ 
b/odoo/songs/install/data_all.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import anthem + +""" Data loaded in all modes + +The data loaded here will be loaded in the 'sample' and +'full' modes. + +""" + + +@anthem.log +def main(ctx): + """ Loading data """ diff --git a/odoo/songs/install/data_full.py b/odoo/songs/install/data_full.py new file mode 100644 index 0000000..49dc1c2 --- /dev/null +++ b/odoo/songs/install/data_full.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import anthem + +""" File for full (production) data + +These songs will be called on integration and production server at the +installation. + +""" + + +@anthem.log +def main(ctx): + """ Loading full data """ + # nothing yet + return diff --git a/odoo/songs/install/logistics.py b/odoo/songs/install/logistics.py new file mode 100644 index 0000000..830b4d4 --- /dev/null +++ b/odoo/songs/install/logistics.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import anthem + + +@anthem.log +def activate_options(ctx): + """ Activating logistics options """ + employee_group = ctx.env.ref('base.group_user') + employee_group.write({ + 'implied_ids': [(4, ctx.env.ref('stock.group_production_lot').id), + (4, ctx.env.ref('stock.group_locations').id), + (4, ctx.env.ref('stock.group_adv_location').id)] + + }) + + +@anthem.log +def set_delivery_pick_ship(ctx): + """ Setting pick-ship on the warehouse """ + ctx.env.ref('stock.warehouse0').delivery_steps = 'pick_ship' + + +@anthem.log +def main(ctx): + """ Configuring logistics """ + activate_options(ctx) + set_delivery_pick_ship(ctx) diff --git a/odoo/songs/install/pre.py b/odoo/songs/install/pre.py new file mode 100644 index 0000000..1de7dc6 --- /dev/null +++ 
b/odoo/songs/install/pre.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import os + +from base64 import b64encode +from pkg_resources import resource_string + +import anthem + +from ..common import req + + +@anthem.log +def setup_company(ctx): + """ Setup company """ + # load logo on company + logo_content = resource_string(req, 'data/images/company_main_logo.png') + b64_logo = b64encode(logo_content) + + values = { + 'name': "geo_11", + 'street': "", + 'zip': "", + 'city': "", + 'country_id': ctx.env.ref('base.ch').id, + 'phone': "+41 00 000 00 00", + 'fax': "+41 00 000 00 00", + 'email': "contact@geo_11.ch", + 'website': "http://www.geo_11.ch", + 'vat': "VAT", + 'logo': b64_logo, + 'currency_id': ctx.env.ref('base.CHF').id, + } + ctx.env.ref('base.main_company').write(values) + + +@anthem.log +def setup_language(ctx): + """ Installing language and configuring locale formatting """ + for code in ('fr_FR',): + ctx.env['base.language.install'].create({'lang': code}).lang_install() + ctx.env['res.lang'].search([]).write({ + 'grouping': [3, 0], + 'date_format': '%d/%m/%Y', + }) + + +@anthem.log +def admin_user_password(ctx): + """ Changing admin password """ + # TODO: default admin password, must be changed. 
+ # Please add your new password in lastpass with the following name: + # [odoo-test] geo_11 test admin user + # In the lastpass directory: Shared-C2C-Odoo-External + # To get an encrypted password: + # $ docker-compose run --rm odoo python -c \ + # "from passlib.context import CryptContext; \ + # print CryptContext(['pbkdf2_sha512']).encrypt('my_password')" + if os.environ.get('RUNNING_ENV') == 'dev': + ctx.log_line('Not changing password for dev RUNNING_ENV') + return + ctx.env.user.password_crypt = ( + '$pbkdf2-sha512$19000$tVYq5dwbI0Tofc85RwiBcA$a1tNyzZ0hxW9kXKIyEwN1' + 'j84z5gIIi1PQmvtFHuxQ4rNA2RaXSGLjXnEifl6ZQZ/wiBJK6fZkeaGgF3DW9A2Bg' + ) + + +@anthem.log +def main(ctx): + """ Main: creating base config """ + setup_company(ctx) + setup_language(ctx) + admin_user_password(ctx) diff --git a/odoo/songs/migration/__init__.py b/odoo/songs/migration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/odoo/songs/migration/helper.py b/odoo/songs/migration/helper.py new file mode 100644 index 0000000..3dc9fac --- /dev/null +++ b/odoo/songs/migration/helper.py @@ -0,0 +1,55 @@ +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +from openupgradelib.openupgrade import logged_query + + +def update_module_moved_models(cr, models, old_module, new_module): + """ Update metadata for models moved to another module """ + + # TODO: If that function works correctly on many projects, + # TODO: see to propose it on openupgradelib. 
+ + for model in models: + query = 'SELECT id FROM ir_model WHERE name = %s' + cr.execute(query, [model]) + row = cr.fetchone() + if row: + model_id = row[0] + + # Update the XML ID of the model + query = """ + UPDATE + ir_model_data + SET + module = %s + WHERE + module = %s + AND + model = 'ir.model' + AND + res_id = %s + """ + logged_query(cr, query, (new_module, old_module, model_id)) + + # Update the XML ID of the fields of the model + query = """ + UPDATE + ir_model_data + SET + module = %s + WHERE + module = %s + AND + model = 'ir.model.fields' + AND + res_id IN ( + SELECT + id + FROM + ir_model_fields + WHERE + model_id = %s + ) + """ + logged_query(cr, query, (new_module, old_module, model_id)) diff --git a/odoo/songs/migration/post.py b/odoo/songs/migration/post.py new file mode 100644 index 0000000..bbd441f --- /dev/null +++ b/odoo/songs/migration/post.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import anthem +from anthem.lyrics.modules import uninstall +from odoo.modules.module import load_information_from_description_file +from odoo.exceptions import UserError + + +@anthem.log +def uninstall_modules(ctx): + """ Uninstall modules """ + uninstall( + ctx, + [ + # Here we need to list: + # all modules installed in previous version of odoo, + # but we don't want to keep. 
+ ] + ) + + +@anthem.log +def database_cleanup(ctx): + """ Clean database """ + + # Clean models from uninstalled modules + ctx.log_line('Start purging models') + try: + purge_models = ctx.env['cleanup.purge.wizard.model'].create({}) + purge_model_lines = purge_models.purge_line_ids + for purge_model_line in purge_model_lines: + ctx.log_line('Try to purge: %s' % purge_model_line.name) + purge_model_line.purge() + except UserError as e: + ctx.log_line("Cleanup resulted in error: '{}'".format(str(e))) + + # Clean columns/fields from uninstalled modules + ctx.log_line('Start purging columns') + try: + purge_columns = ctx.env['cleanup.purge.wizard.column'].create({}) + purge_column_lines = purge_columns.purge_line_ids + for purge_column_line in purge_column_lines: + ctx.log_line('Try to purge: %s' % purge_column_line.name) + purge_column_line.purge() + except UserError as e: + ctx.log_line("Cleanup resulted in error: '{}'".format(str(e))) + + # Clean tables from uninstalled modules + ctx.log_line('Start purging tables') + try: + purge_tables = ctx.env['cleanup.purge.wizard.table'].create({}) + purge_table_lines = purge_tables.purge_line_ids.filtered( + lambda l: l.name not in [ + # The marabunta_version table must never be deleted + 'marabunta_version', + ] + ) + for purge_table_line in purge_table_lines: + ctx.log_line('Try to purge: %s' % purge_table_line.name) + purge_table_line.purge() + except UserError as e: + ctx.log_line("Cleanup resulted in error: '{}'".format(str(e))) + + # Clean models data from uninstalled modules + ctx.log_line('Start purging datas') + try: + purge_datas = ctx.env['cleanup.purge.wizard.data'].create({}) + purge_data_lines = purge_datas.purge_line_ids.filtered( + # Metadata exported, imported or from setup must not be deleted + lambda l: '__export__' not in l.name and + '__setup__' not in l.name and + '__import__' not in l.name + ) + for purge_data_line in purge_data_lines: + ctx.log_line('Try to purge: %s' % purge_data_line.name) + 
purge_data_line.purge() + except UserError as e: + ctx.log_line("Cleanup resulted in error: '{}'".format(str(e))) + + # Clean menus from uninstalled modules + ctx.log_line('Start purging menus') + try: + purge_menus = ctx.env['cleanup.purge.wizard.menu'].create({}) + purge_menu_lines = purge_menus.purge_line_ids + for purge_menu_line in purge_menu_lines: + ctx.log_line('Try to purge: %s' % purge_menu_line.name) + purge_menu_line.purge() + except UserError as e: + ctx.log_line("Cleanup resulted in error: '{}'".format(str(e))) + + +@anthem.log +def clean_unavailable_modules(ctx): + """Clean unavailable modules + + When we migrate a project, + we have a lot of modules which became unavailable in the new version. + This function will clean the module list to delete unavailable modules. + """ + module_model = ctx.env['ir.module.module'] + all_modules = module_model.search([ + + # Here we need to list: + # all modules uninstalled we want to migrate + # to avoid to remove them + + # Example: + + # ( + # 'name', + # 'not in', + # [ + # 'account_asset_management', # To migrate! 
+ # ] + # ) + ]) + for module in all_modules: + info = load_information_from_description_file(module.name) + if not info: + if module.state in ['uninstalled', 'uninstallable']: + ctx.log_line( + 'MODULE UNAVAILABLE (will be deleted) : %s' % module.name + ) + if ctx.env['ir.model.data'].search([ + ('module', '=', module.name) + ]): + ctx.log_line( + "===> CAN'T UNLINK MODULE, WE HAVE METADATA " + "(See if we want to migrate or uninstall the module)" + ) + else: + module.unlink() + else: + ctx.log_line( + 'MODULE UNAVAILABLE BUT BAD STATE : %s (%s)' % + (module.name, module.state) + ) + + module_model.update_list() + + +@anthem.log +def post(ctx): + """ POST: migration """ + uninstall_modules(ctx) + # Check the list of cleaned data before uncomment the call of this function + # database_cleanup(ctx) + clean_unavailable_modules(ctx) diff --git a/odoo/songs/migration/post_check_fields.py b/odoo/songs/migration/post_check_fields.py new file mode 100644 index 0000000..2c2160a --- /dev/null +++ b/odoo/songs/migration/post_check_fields.py @@ -0,0 +1,110 @@ +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import anthem +import csv +import os + + +@anthem.log +def update_base(ctx): + """Update base + + To see which fields have been deleted in standard build, + we need to do an update base which will recreate deleted fields. + """ + ctx.env['ir.module.module'].search( + [('name', '=', 'base')] + ).button_immediate_upgrade() + + +@anthem.log +def check_fields(ctx): + """Check fields + + Compare all fields defined in database between + the fields before update of modules + and the fields after update of modules. + To avoid deletion of fields because + a field has been moved from a module to another. 
+ """ + # Get the original fields saved in a CSV file before update of modules + original_fields = {} + # The explanation for the chosen directory is in the pre_check_fields.py + with open('/data/odoo/pre_check_fields.csv', 'r') as csvfile: + reader = csv.reader(csvfile) + for model_name, module_name, field_name in reader: + if model_name not in original_fields: + original_fields[model_name] = {} + if field_name not in original_fields[model_name]: + original_fields[model_name][field_name] = [] + original_fields[model_name][field_name].append(module_name) + + # Get the final fields we have now in database + final_fields = {} + ctx.env.cr.execute( + """ +SELECT + f.model AS model_name, + d.module AS module_name, + f.name AS field_name +FROM + ir_model_fields f +INNER JOIN + ir_model_data d + ON f.id = d.res_id + AND d.model = 'ir.model.fields' + """ + ) + fields_data = ctx.env.cr.fetchall() + for model_name, module_name, field_name in fields_data: + if model_name not in final_fields: + final_fields[model_name] = {} + if field_name not in final_fields[model_name]: + final_fields[model_name][field_name] = [] + final_fields[model_name][field_name].append(module_name) + + # For each fields we have now in database, + # we will compare the list of modules + # which define this field before and after the update of modules. + for model_name, model_fields in final_fields.items(): + for field_name, field_modules in model_fields.items(): + if field_name in original_fields.get(model_name, {}): + original_modules = set( + original_fields[model_name][field_name] + ) + new_modules = set(field_modules) + if original_modules and original_modules != new_modules: + # We have not the same list of modules: + # we display a line in build log. 
+ ctx.log_line( + 'PROBLEM ON DEFINED FIELD: ' + 'Model %s / ' + 'Field %s / ' + 'Old modules %s / ' + 'New modules %s' % + ( + model_name, + field_name, + list(original_modules), + list(new_modules) + ) + ) + + +@anthem.log +def post(ctx): + """POST: migration check fields""" + env = os.environ.get('RUNNING_ENV') + # We do the check only in dev mode. + # To avoid to do the check each times, + # the check is done only if it's not disabled in environment variables. + if env == 'dev': + migration_check_fields = os.environ.get('MIGRATION_CHECK_FIELDS') + if migration_check_fields != 'True': + ctx.log_line( + 'If you never check the fields, please do it!' + ) + else: + update_base(ctx) + check_fields(ctx) diff --git a/odoo/songs/migration/pre.py b/odoo/songs/migration/pre.py new file mode 100644 index 0000000..0126a3e --- /dev/null +++ b/odoo/songs/migration/pre.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import anthem +import os +from .helper import update_module_moved_models +from openupgradelib.openupgrade import update_module_names,\ + update_module_moved_fields + + +@anthem.log +def fix_path_on_attachments(ctx): + """ Fix path on attachments """ + env = os.environ.get('RUNNING_ENV') + if env in ('prod', 'integration'): + # Update attachment given by odoo for the database migration + ctx.env.cr.execute(""" + UPDATE + ir_attachment + SET + store_fname = 's3://geo_11-odoo-%s/' || store_fname + WHERE + store_fname IS NOT NULL + AND store_fname NOT LIKE 's3://%%'; + """ % (env,)) + else: + # Remove the s3 attachment + ctx.env.cr.execute(""" +DELETE FROM + ir_attachment +WHERE + store_fname IS NOT NULL +AND store_fname LIKE 's3://%'; + """) + + +@anthem.log +def rename_modules(ctx): + """ Rename modules """ + update_module_names( + ctx.env.cr, + [ + # Here we need to list: + # all modules on which + # the module is renamed between the old and new version + + # 
Example: + # ('account_financial_report_webkit', 'account_financial_report'), + ], + merge_modules=True, + ) + + +@anthem.log +def update_moved_models(ctx): + """ Update model moved to another module """ + + # When a model is moved to another module, + # if the new module is updated before the old module is uninstalled, + # the model is removed. + + # That function will update the metadata of the model + # to indicate to Odoo the new module of the model. + + # Example: + + # update_module_moved_models( + # ctx.env.cr, + # [ + # # Here we need to list: + # # all models which are moved in another module + # + # 'my.custom.model' + # ], + # 'old_module', # Old module of the models + # 'new_module', # New module of the models + # ) + + +@anthem.log +def update_moved_fields(ctx): + """ Update fields moved to another module """ + + # When a field is moved to another module, + # if the new module is updated before the old module is uninstalled, + # the field is removed. + + # That function will update the metadata of the field + # to indicate to Odoo the new module of the field. + + # Example: + + # update_module_moved_fields( + # ctx.env.cr, + # 'product.template', # Model of the field + # ['purchase_ok'], # Fields moved + # 'invoice_webkit', # Old module of the fields + # 'product', # New module of the fields + # ) + + +@anthem.log +def pre(ctx): + """ PRE: migration """ + fix_path_on_attachments(ctx) + rename_modules(ctx) + update_moved_models(ctx) + update_moved_fields(ctx) diff --git a/odoo/songs/migration/pre.sql b/odoo/songs/migration/pre.sql new file mode 100644 index 0000000..28d25d6 --- /dev/null +++ b/odoo/songs/migration/pre.sql @@ -0,0 +1,13 @@ +-- Reset 'state' of ir_module_module +-- +-- When we receive the database from the migration service, addons are +-- 'to upgrade', set them to uninstalled. +-- +-- With that change, in migration.yml file, +-- we need to add all modules we want to keep installed. 
+UPDATE + ir_module_module +SET + state = 'uninstalled' +WHERE + state IN ('to install', 'to upgrade'); diff --git a/odoo/songs/migration/pre_check_fields.py b/odoo/songs/migration/pre_check_fields.py new file mode 100644 index 0000000..8fea21d --- /dev/null +++ b/odoo/songs/migration/pre_check_fields.py @@ -0,0 +1,66 @@ +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import anthem +import os +from odoo.addons.base_dj.utils import csv_from_data + + +@anthem.log +def pre_check_fields(ctx): + """Pre check fields + + Create a CSV file with all fields in the database. + Used at the end of the build to see + which fields must be changed of modules. + To avoid deletion of fields because + a field has been moved from a module to another. + """ + ctx.env.cr.execute( + """ +SELECT + f.model AS model_name, + d.module AS module_name, + f.name AS field_name +FROM + ir_model_fields f +INNER JOIN + ir_model_data d + ON f.id = d.res_id + AND d.model = 'ir.model.fields' + """ + ) + fields_data = ctx.env.cr.fetchall() + + data = csv_from_data(fields_data[0], fields_data[0:]) + + # We can't create this file in another places, for some reasons: + # - We can't use the /tmp directory if we want + # to keep the file for test or for launch songs manually. + # - We can't use environment variable "ODOO_DATA_PATH", + # because the directory /data/odoo is a shared volume on which + # the owner is the developper. + # And the internal user "odoo" used for launch migration + # has no rights to write in this repository. + # For information, the migration is launched like this: + # https://github.com/camptocamp/docker-odoo-project/ + # blob/master/bin/docker-entrypoint.sh#L120 + with open('/data/odoo/pre_check_fields.csv', 'w') as fields_csv: + fields_csv.write(str(data, 'utf-8')) + + +@anthem.log +def pre(ctx): + """PRE: migration check fields""" + env = os.environ.get('RUNNING_ENV') + # We do the check only in dev mode. 
+ # To avoid to do the check each times, + # the check is done only if it's not disabled in environment variables. + if env == 'dev': + migration_check_fields = os.environ.get('MIGRATION_CHECK_FIELDS') + if migration_check_fields != 'True': + ctx.log_line( + 'If you never check the fields, please do it!' + ) + else: + pre_check_fields(ctx) diff --git a/odoo/songs/sample/__init__.py b/odoo/songs/sample/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/odoo/songs/sample/data_sample.py b/odoo/songs/sample/data_sample.py new file mode 100644 index 0000000..149cbd6 --- /dev/null +++ b/odoo/songs/sample/data_sample.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import anthem +from ..common import load_csv + +""" File for a sample dataset + +These songs will be called when the mode is 'sample', we should import only +excerpt of data, while the full data is only imported in the 'full' mode. + +This dataset must be lighter to have fast build for dev and test instances. 
+ + +""" + + +@anthem.log +def import_customers(ctx): + """ Importing customers from csv """ + load_csv(ctx, 'data/sample/customers.csv', 'res.partner') + + +@anthem.log +def main(ctx): + """ Loading sample data """ + import_customers(ctx) diff --git a/odoo/songs/updates/__init__.py b/odoo/songs/updates/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/odoo/songs/updates/update_10_x_x.py b/odoo/songs/updates/update_10_x_x.py new file mode 100644 index 0000000..36ecfd1 --- /dev/null +++ b/odoo/songs/updates/update_10_x_x.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +import anthem + + +@anthem.log +def do_something(ctx): + pass + + +@anthem.log +def main(ctx): + do_something(ctx) diff --git a/odoo/src b/odoo/src new file mode 160000 index 0000000..27d5776 --- /dev/null +++ b/odoo/src @@ -0,0 +1 @@ +Subproject commit 27d57763997c1933da47403db7cff3a1176acdaf diff --git a/odoo/start-entrypoint.d/010_set_enterprise_demo b/odoo/start-entrypoint.d/010_set_enterprise_demo new file mode 100755 index 0000000..663af0c --- /dev/null +++ b/odoo/start-entrypoint.d/010_set_enterprise_demo @@ -0,0 +1,41 @@ +#!/bin/bash + +# +# Often in development mode, we retrieve a database from a production server +# with a enterprise subscription. This script automatically changes +# the config parameters to set enterprise subscription in demo mode +# when working in dev. 
+# + +keep_enterprise_subscription=$(echo "${ODOO_KEEP_ENTERPRISE_SUBSCRIPTION}" | tr '[:upper:]' '[:lower:]' ) + +if [ "$keep_enterprise_subscription" = "true" ]; then + exit 0 +fi + +if [ "$RUNNING_ENV" = "dev" ] || [ "$RUNNING_ENV" = "test" ] || [ "$RUNNING_ENV" = "integration" ] ; then + if [ "$( psql -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" )" != '1' ] + then + echo "Database does not exist, ignoring script" + exit 0 + fi + + echo "Setting enterprise subscription in demo mode" + + psql << EOF + + DELETE FROM ir_config_parameter + WHERE key = 'database.enterprise_code'; + + UPDATE ir_config_parameter + SET value = 'demo' + WHERE key = 'database.expiration_reason' + AND value != 'demo'; + + UPDATE ir_config_parameter + SET value = CURRENT_DATE + INTERVAL '2 month' + WHERE key = 'database.expiration_date'; + +EOF + +fi diff --git a/odoo/start-entrypoint.d/011_reset_uuid b/odoo/start-entrypoint.d/011_reset_uuid new file mode 100755 index 0000000..373cbfd --- /dev/null +++ b/odoo/start-entrypoint.d/011_reset_uuid @@ -0,0 +1,33 @@ +#!/bin/bash + +# +# Often in development or integration mode, we retrieve a database from a production server +# with a production database uuid. 
This script automatically changes +# the config parameters to set a random uuid in order to fit the need of the license +# management done by odoo.com +# + +keep_database_uuid=$(echo "${ODOO_KEEP_DATABASE_UUID}" | tr '[:upper:]' '[:lower:]' ) + +if [ "$keep_database_uuid" = "true" ]; then + exit 0 +fi + +if [ "$RUNNING_ENV" != "prod" ] ; then + if [ "$( psql -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" )" != '1' ] + then + echo "Database does not exist, ignoring script" + exit 0 + fi + UUID=`cat /proc/sys/kernel/random/uuid` + echo "Set new database uuid" + + psql << EOF + + UPDATE ir_config_parameter + SET value = '$UUID' + WHERE key = 'database.uuid'; + +EOF + +fi diff --git a/tasks/__init__.py b/tasks/__init__.py new file mode 100644 index 0000000..d6ace44 --- /dev/null +++ b/tasks/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + +import os +import importlib + +from invoke import Collection + +ns = Collection() + + +for filename in os.listdir(os.path.dirname(os.path.realpath(__file__))): + if filename.endswith('.py') and not filename.startswith('__'): + modname = filename[:-3] + mod = importlib.import_module('.' 
+ modname, package='tasks') + ns.add_collection(mod) diff --git a/tasks/common.py b/tasks/common.py new file mode 100644 index 0000000..cc879a5 --- /dev/null +++ b/tasks/common.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +from __future__ import print_function + +import errno +import os +import shutil +import tempfile +import yaml + +from builtins import input + +from contextlib import contextmanager +from invoke import exceptions + +try: + import git_aggregator.config + import git_aggregator.main + import git_aggregator.repo +except ImportError: + print('Please install git-aggregator') + + +def root_path(): + return os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) + + +def build_path(path, from_root=True, from_file=None): + if not from_file and from_root: + base_path = root_path() + else: + if from_file is None: + from_file = __file__ + base_path = os.path.dirname(os.path.realpath(from_file)) + + return os.path.join(base_path, path) + + +VERSION_FILE = build_path('odoo/VERSION') +HISTORY_FILE = build_path('HISTORY.rst') +PENDING_MERGES = build_path('odoo/pending-merges.yaml') +MIGRATION_FILE = build_path('odoo/migration.yml') +COOKIECUTTER_CONTEXT = build_path('.cookiecutter.context.yml') + +GIT_REMOTE_NAME = 'camptocamp' +TEMPLATE_GIT = 'git@github.com:camptocamp/odoo-template.git' + + +def cookiecutter_context(): + with open(COOKIECUTTER_CONTEXT, 'rU') as f: + return yaml.load(f.read()) + + +def exit_msg(message): + print(message) + raise exceptions.Exit(1) + + +@contextmanager +def cd(path): + prev = os.getcwd() + os.chdir(os.path.expanduser(path)) + try: + yield + finally: + os.chdir(prev) + + +def current_version(): + with open(VERSION_FILE, 'rU') as fd: + version = fd.read().strip() + return version + + +def ask_or_abort(message): + r = input(message + ' (y/N) ') + if r not in ('y', 'Y', 'yes'): + exit_msg('Aborted') + + +def 
check_git_diff(ctx, direct_abort=False): + try: + ctx.run('git diff --quiet --exit-code') + ctx.run('git diff --cached --quiet --exit-code') + except exceptions.Failure: + if direct_abort: + exit_msg('Your repository has local changes. Abort.') + ask_or_abort('Your repository has local changes, ' + 'are you sure you want to continue?') + + +@contextmanager +def tempdir(): + name = tempfile.mkdtemp() + try: + yield name + finally: + try: + shutil.rmtree(name) + except OSError as e: + # already deleted + if e.errno != errno.ENOENT: + raise + + +def search_replace(file_path, old, new): + """ Replace a text in a file on each lines """ + shutil.move(file_path, file_path + '.bak') + with open(file_path + '.bak', 'r') as f_r: + with open(file_path, 'w') as f_w: + for line in f_r: + f_w.write(line.replace(old, new)) + + +def fix_repo_path(path): + # FIXME: diry fix to make sure submodule path is correct. + # Premise: gitaggregator assumes paths are relative to pending merge file + # (odoo/pending-merges.yml as of today) + # but we run it from the root of the project, which leads to repo.cwd like: + # /home/sorsi/dev/projects/fluxdock/external-src/connector-interfaces + # which is not correct! Here we hack it to make it absolutely correct + # and then we'll have to adatp it or trash or fix gitaggregator + # when we move pending merges to separated files in proj root (#225). + if '/odoo/' in path: + return path + proj_path = root_path() + repo_path = path.replace(proj_path.rstrip('/'), '').strip('/') + return proj_path + '/odoo/' + repo_path + + +def get_aggregator_repositories(): + repositories = git_aggregator.config.load_config( + build_path(PENDING_MERGES) + ) + for repo_dict in repositories: + repo_dict['cwd'] = fix_repo_path(repo_dict['cwd']) + yield git_aggregator.repo.Repo(**repo_dict) + + +def get_aggregator_repo(submodule_path): + """Build the git_aggregator repo object. 
+ + Parses the pending merges file and creates the repo object + for the one that has the right submodule path. + """ + repo = None + found = False + for repo in get_aggregator_repositories(): + if git_aggregator.main.match_dir(repo.cwd, submodule_path): + found = True + break + if not found: + exit_msg( + 'No submodule found in pending-merges matching path {}'.format( + submodule_path) + ) + return repo diff --git a/tasks/database.py b/tasks/database.py new file mode 100644 index 0000000..d218931 --- /dev/null +++ b/tasks/database.py @@ -0,0 +1,215 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +from __future__ import print_function +import psycopg2 +import getpass +import requests +import gnupg +import time +from os.path import expanduser +from contextlib import contextmanager +from invoke import task +from datetime import datetime + +from .common import cookiecutter_context + + +@contextmanager +def ensure_db_container_up(ctx): + """ Ensure the DB container is up and running. 
+ + :param ctx: + :return: True if already up, False if it wasn't + """ + try: + ctx.run('docker-compose port db 5432', hide=True) + started = True + except Exception: + ctx.run('docker-compose up -d db', hide=True) + running = False + # Wait for the container to start + count = 0 + while not running: + try: + ctx.run('docker-compose port db 5432', hide=True) + running = True + except Exception as e: + count += 1 + # Raise the error after 3 failed attempts + if count >= 3: + raise e + print('Waiting for DB container to start') + time.sleep(0.3) + started = False + yield + # Stop the container if it wasn't already up and running + if not started: + ctx.run('docker-compose stop db', hide=True) + + +def get_db_container_port(ctx): + """Get and return DB container port""" + run_res = ctx.run('docker-compose port db 5432', hide=True) + return str(int(run_res.stdout.split(':')[-1])) + + +def expand_path(path): + if path.startswith('~'): + path = expanduser(path) + return path + + +@task(name='list-versions') +def list_versions(ctx): + """Print a table of DBs with Marabunta version and install date.""" + with ensure_db_container_up(ctx): + db_port = get_db_container_port(ctx) + dsn = "host=localhost dbname=postgres " \ + "user=odoo password=odoo port=%s" % db_port + # Connect and list DBs + with psycopg2.connect(dsn) as db_connection: + with db_connection.cursor() as db_cursor: + db_cursor.execute( + "SELECT datname " + "FROM pg_database " + "WHERE datistemplate = false " + "AND datname not in ('postgres', 'odoo');") + databases_fetch = db_cursor.fetchall() + db_list = [ + db_name_tuple[0] for db_name_tuple in databases_fetch + ] + res = {} + # Get version for each DB + for db_name in db_list: + dsn = "host=localhost dbname=%s user=odoo " \ + "password=odoo port=%s" % (db_name, db_port) + with psycopg2.connect(dsn) as db_connection: + with db_connection.cursor() as db_cursor: + try: + db_cursor.execute( + "SELECT date_done, number " + "FROM marabunta_version " + "ORDER BY 
date_done DESC " + "LIMIT 1;") + version_tuple = db_cursor.fetchone() + except psycopg2.ProgrammingError: + # Error expected when marabunta_version table does not + # exist + res[db_name] = (None, 'unknown') + continue + res[db_name] = version_tuple + print("{:<20} {:<10} {:<12}".format('DB Name', 'Version', + 'Install date')) + print('======= ======= ============') + for db_name, version in sorted(res.iteritems(), + key=lambda x: x[1][0] or datetime.min, + reverse=True): + if version[0]: + time = version[0].strftime('%Y-%m-%d') + else: + time = 'unknown' + print( + "{:<20} {:<10} {:<12}".format(db_name, version[1], time)) + + +@task(name='local-dump') +def local_dump(ctx, db_name='odoodb', path='.'): + """Create a PG Dump for given database name. + + :param db_name: Name of the Database to dump + :param path: Local path to store the dump + :return: Dump file path + """ + path = expand_path(path) + with ensure_db_container_up(ctx): + db_port = get_db_container_port(ctx) + username = getpass.getuser() + project_name = cookiecutter_context()['project_name'] + dump_name = '%s_%s-%s.pg' % ( + username, project_name, datetime.now().strftime('%Y%m%d-%H%M%S')) + dump_file_path = '%s/%s' % (path, dump_name) + ctx.run('pg_dump -h localhost -p %s --format=c -U odoo --file %s %s' % ( + db_port, dump_file_path, db_name + ), hide=True) + print('Dump succesfully generated at %s' % dump_file_path) + return dump_file_path + + +def encrypt_for_dump_bags(ctx, dump_file_path): + """Encrypt dump to GPG using keys from dump-bag.odoo.camptocamp.ch + + :param dump_file_path: Path of *.pg dump file + :return: Path of the encrypted GPG dump + """ + gpg_file_path = '%s.gpg' % dump_file_path + r = requests.get('https://dump-bag.odoo.camptocamp.ch/keys') + gpg = gnupg.GPG() + gpg.import_keys(r.text) + fingerprints = [str(rec['fingerprint']) for rec in gpg.list_keys()] + with open(dump_file_path, 'rb') as dump_file: + data = gpg.encrypt(dump_file, *fingerprints) + with open(gpg_file_path, 
'wb') as encrypted_dump: + encrypted_dump.write(data.data) + print('Dump successfully encrypted at %s' % gpg_file_path) + return gpg_file_path + + +@task(name='share-on-dumps-bag') +def share_on_dumps_bag(ctx, dump_file_path): + """Encrypt and push a dump to Odoo Dump bags manually. + + GPG dump will be pushed to url s3://odoo-dumps/your_username + + :param dump_file_path: Path of *.pg dump file + """ + dump_file_path = expand_path(dump_file_path) + gpg_file_path = encrypt_for_dump_bags(ctx, dump_file_path) + username = getpass.getuser() + ctx.run( + 'aws --profile=odoo-dumps s3 cp %s s3://odoo-dumps/%s' % ( + gpg_file_path, '/'.join([username, gpg_file_path.split('/')[-1]]) + ), hide=True) + # Set ShortExpire tag for the dump to be auto deleted after 1 week + ctx.run( + 'aws --profile=odoo-dumps s3api put-object-tagging ' + '--bucket odoo-dumps --key %s/%s ' + '--tagging="TagSet=[{Key=ShortExpire,Value=True}]"' % ( + username, gpg_file_path.split('/')[-1]), hide=True + ) + print('Encrypted dump successfully shared on dumps bag.') + print('Note this dump will be auto-deleted after 7 days.') + + +@task(name='dump-and-share') +def dump_and_share(ctx, db_name='odoodb', tmp_path='/tmp', + keep_local_dump=False): + """Create a dump and share it on Odoo Dumps Bag. + + Usage : invoke database.dump-and-share --db-name=mydb + + :param db_name: Name of the Database to dump + :param tmp_path: Temporary local path to store the dump + :param keep_local_dump: Boolean to keep the generated and encrypted dumps + locally + """ + tmp_path = expand_path(tmp_path) + dump_file_path = local_dump(ctx, db_name=db_name, path=tmp_path) + share_on_dumps_bag(ctx, dump_file_path) + if not keep_local_dump: + ctx.run('rm %s' % dump_file_path) + ctx.run('rm %s.gpg' % dump_file_path) + + +@task(name='empty-my-dump-bag') +def empty_my_dump_bag(ctx): + """Empty the content of s3://odoo-dumps/your_username. + + Please call this function as soon as your recipient did download your dump. 
+ """ + username = getpass.getuser() + ctx.run( + 'aws --profile=odoo-dumps s3 rm s3://odoo-dumps/%s/ --recursive' % + username, hide=True + ) + print('Your dumps bag has been emptied successfully.') diff --git a/tasks/deprecate.py b/tasks/deprecate.py new file mode 100644 index 0000000..55d79ce --- /dev/null +++ b/tasks/deprecate.py @@ -0,0 +1,176 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) +from __future__ import print_function + +import os + +try: + from ruamel.yaml import YAML +except ImportError: + print('Please install ruamel.yaml') + +from invoke import task +from .common import ( + MIGRATION_FILE, + build_path, + search_replace, +) + + +@task(name='demo-to-sample') +def demo_to_sample(ctx): + """ Renaming of demo to sample for MARABUNTA_MODE + + This intend to fix files that aren't synced + + It will edit the following files: + - .travis.yml + - docker-compose.overide.yml + - odoo/migration.yml + - odoo/songs/install/data_all.py (for comment) + - test.yml + - travis/minion-files/rancher.list + + It will move: + - odoo/data/demo to odoo/data/sample + - odoo/songs/install/data_demo.py to odoo/songs/sample/data_sample.py + + """ + change_list = [] + # .travis.yml + path = build_path('.travis.yml') + search_replace( + path, + '-e MARABUNTA_MODE=demo', + '-e MARABUNTA_MODE=sample') + change_list.append(path) + + # docker-compose.overide.yml + path = build_path('docker-compose.override.yml') + if os.path.exists(path): + search_replace( + path, + '- MARABUNTA_MODE=demo', + '- MARABUNTA_MODE=sample') + change_list.append(path) + + # odoo/migration.yml + path = MIGRATION_FILE + search_replace( + path, + 'anthem songs.install.data_demo', + 'anthem songs.sample.data_sample') + change_list.append(path) + + yaml = YAML() + # preservation of indentation + yaml.indent(mapping=2, sequence=4, offset=2) + + # change demo: keys to sample: + with open(MIGRATION_FILE) as f: + data = 
yaml.load(f.read()) + + for x in data['migration']['versions']: + if 'modes' in x and 'demo' in x['modes']: + x['modes']['sample'] = x['modes']['demo'] + del x['modes']['demo'] + + with open(MIGRATION_FILE, 'w') as f: + yaml.dump(data, f) + + # test.yml + path = build_path('odoo/songs/install/data_all.py') + if os.path.exists(path): + search_replace( + path, + "The data loaded here will be loaded in the 'demo' and", + "The data loaded here will be loaded in the 'sample' and") + change_list.append(path) + + # test.yml + path = build_path('test.yml') + if os.path.exists(path): + search_replace( + path, + '- MARABUNTA_MODE=demo', + '- MARABUNTA_MODE=sample') + change_list.append(path) + + # travis/minion-files/rancher.list + path = build_path('travis/minion-files/rancher.list') + if os.path.exists(path): + search_replace( + path, + 'MARABUNTA_MODE=demo', + 'MARABUNTA_MODE=sample') + change_list.append(path) + + ctx.run('git add {}'.format(' '.join(change_list))) + + folder = 'odoo/data/sample' + try: + os.mkdir(folder, 0o775) + except OSError: + print("odoo/data/sample directory already exists") + # move odoo/data/demo to odoo/data/sample + try: + ctx.run( + 'git mv {} {}'.format( + 'odoo/data/demo/*', + 'odoo/data/sample')) + except Exception: + print('nothing to move') + + # move odoo/songs/install/data_demo.py to odoo/songs/sample/data_sample.py + folder = 'odoo/songs/sample' + try: + os.mkdir(folder, 0o775) + with open(folder + '/__init__.py', 'w') as f: + f.write('') + except OSError: + print("odoo/songs/sample directory already exists") + try: + ctx.run( + 'git mv {} {}'.format( + 'odoo/songs/install/data_demo.py', + 'odoo/songs/sample/data_sample.py')) + except Exception: + print('nothing to move') + + # Change strings referencing 'data/demo' to 'data/sample' + path = build_path('odoo/songs/sample/data_sample.py') + if os.path.exists(path): + search_replace( + path, + 'data/demo', + 'data/sample') + change_list.append(path) + + ctx.run('git add 
odoo/songs/sample') + + print("Deprecation applied") + print() + print("The following files were checked and modified:") + print("- .travis.yml") + print("- docker-compose.overide.yml") + print("- odoo/migration.yml") + print("- odoo/songs/install/data_all.py (for comment)") + print("- odoo/songs/install/data_demo.py (path 'data/demo' to " + "'data/sample')") + print("- test.yml") + print("- travis/minion-files/rancher.list") + + print() + print("The following files were moved:") + print("- odoo/data/demo to odoo/data/sample") + print("- odoo/songs/install/data_demo.py to odoo/songs/sample/data_sample" + ".py") + print() + print("Please check your staged files:") + print(" git diff --cached") + print("Please search for any unchanged 'demo' string in odoo/songs " + "and fix it manually.") + print("If everything is good:") + print(" git commit -m 'Apply depreciation of demo in favor of sample'") + print(" git push") diff --git a/tasks/project.py b/tasks/project.py new file mode 100644 index 0000000..5b8a387 --- /dev/null +++ b/tasks/project.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) +from __future__ import print_function + +import fnmatch +import os +import shutil + +import yaml + +from invoke import task + +from .common import ( + exit_msg, + check_git_diff, + tempdir, + cookiecutter_context, + TEMPLATE_GIT, + cd, + root_path, +) + +try: + from cookiecutter.main import cookiecutter +except ImportError: + cookiecutter = None + + +def _exclude_fnmatch(root, files, exclude): + return list(set(files) - + set([d for d in files for excl in exclude + if fnmatch.fnmatch(os.path.join(root, d), excl)])) + + +def _add_comment_unknown(path, comment): + print('No function to add a comment in {}'.format(path)) + + +def _add_comment_py(path, comment): + with open(path, 'rU') as f: + content = f.readlines() + insert_at = 0 + for index, line in enumerate(content): + if 
line.startswith('#!'): + insert_at = index + 1 + if 'coding:' in line: + insert_at = index + 1 + break + comment = '\n'.join(['# {}'.format(line) for line in comment.splitlines()]) + comment += '\n' + content.insert(insert_at, comment) + with open(path, 'w') as f: + f.write(''.join(content)) + + +def _add_comment_md(path, comment): + with open(path, 'rU') as f: + content = f.readlines() + insert_at = 0 + comment = '\n'.format(comment) + content.insert(insert_at, comment) + with open(path, 'w') as f: + f.write(''.join(content)) + + +def _add_comment_sh(path, comment): + with open(path, 'rU') as f: + content = f.readlines() + insert_at = 0 + for index, line in enumerate(content): + if line.startswith('#!'): + insert_at = index + 1 + comment = '\n'.join(['# {}'.format(line) for line in comment.splitlines()]) + comment += '\n' + content.insert(insert_at, comment) + with open(path, 'w') as f: + f.write(''.join(content)) + + +def _add_comment_xml(path, comment): + with open(path, 'rU') as f: + content = f.readlines() + insert_at = 0 + for index, line in enumerate(content): + if line.startswith(''): + insert_at = index + 1 + comment = '\n'.format(comment) + content.insert(insert_at, comment) + with open(path, 'w') as f: + f.write(''.join(content)) + + +def add_comment(path, comment): + __, ext = os.path.splitext(path) + funcs = { + '.py': _add_comment_py, + '.md': _add_comment_md, + '.sh': _add_comment_sh, + '.xml': _add_comment_xml, + } + if not ext: + with open(path, 'rU') as f: + line = f.readline() + if line.startswith('#!'): + if 'python' in line: + ext = '.py' + if 'sh' in line: + ext = '.sh' + + funcs.get(ext, _add_comment_unknown)(path, comment) + + +@task +def sync(ctx, commit=True): + """ Sync files from the project template """ + if not cookiecutter: + exit_msg('cookiecutter must be installed') + check_git_diff(ctx, direct_abort=True) + context = cookiecutter_context() + os.chdir(root_path()) + with tempdir() as tmp: + cookiecutter( + TEMPLATE_GIT, + 
no_input=True, + extra_context=context, + output_dir=tmp, + overwrite_if_exists=True, + ) + template = os.path.join(tmp, context['repo_name']) + selected_files = set() + with cd(template): + with open(os.path.join(template, '.sync.yml'), 'rU') as syncfile: + sync = yaml.load(syncfile.read()) + include = sync['sync'].get('include', []) + exclude = sync['sync'].get('exclude', []) + comment = sync['sync'].get('comment', '') + for root, dirs, files in os.walk('.', topdown=True): + if exclude: + dirs[:] = _exclude_fnmatch(root, dirs, exclude) + files[:] = _exclude_fnmatch(root, files, exclude) + syncfiles = [os.path.join(root, f) for f in files] + for incl in include: + selected_files.update(fnmatch.filter(syncfiles, incl)) + + print('Syncing files:') + for s in sorted(selected_files): + print('* {}'.format(s)) + + for relpath in selected_files: + source = os.path.join(template, relpath) + target_dir = os.path.dirname(relpath) + if not os.path.exists(target_dir): + os.makedirs(target_dir) + shutil.copy(source, relpath) + if os.path.isfile(relpath): + add_comment(relpath, comment) + + ctx.run('git add {}'.format(' '.join(selected_files))) + if commit: + ctx.run('git commit -m "Update project from odoo-template" -e -vv', + pty=True) diff --git a/tasks/release.py b/tasks/release.py new file mode 100644 index 0000000..6b48fa5 --- /dev/null +++ b/tasks/release.py @@ -0,0 +1,196 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) +from __future__ import print_function + +import fileinput +from datetime import date + +try: + from builtins import input +except ImportError: + print('Please install future') + +try: + import yaml +except ImportError: + print('Please install pyyaml') + +from marabunta.version import MarabuntaVersion +from distutils.version import StrictVersion +from invoke import task, exceptions +from .common import ( + PENDING_MERGES, + MIGRATION_FILE, + VERSION_FILE, + HISTORY_FILE, + 
GIT_REMOTE_NAME, + cookiecutter_context, + current_version, + exit_msg, + check_git_diff, + cd, + build_path +) + + +@task(name='push-branches') +def push_branches(ctx, force=False): + """ Push the local branches to the camptocamp remote + + The branch name will be composed of the id of the project and the current + version number (the one in odoo/VERSION). + + It should be done at the closing of every release, so we are able + to build a new patch branch from the same commits if required. + """ + version = current_version() + project_id = cookiecutter_context()['project_id'] + branch_name = 'merge-branch-{}-{}'.format(project_id, version) + response = input( + 'Push local branches to {}? (Y/n) '.format(branch_name) + ) + if response in ('n', 'N', 'no'): + exit_msg('Aborted') + if not force: + check_git_diff(ctx) + print('Pushing pending-merge branches...') + with open(PENDING_MERGES, 'rU') as f: + merges = yaml.load(f.read()) + if not merges: + print('Nothing to push') + return + for path, setup in merges.items(): + print('pushing {}'.format(path)) + with cd(build_path(path, from_file=PENDING_MERGES)): + try: + ctx.run( + 'git config remote.{}.url'.format(GIT_REMOTE_NAME) + ) + except exceptions.Failure: + remote_url = setup['remotes'][GIT_REMOTE_NAME] + ctx.run( + 'git remote add {} {}'.format(GIT_REMOTE_NAME, + remote_url) + ) + ctx.run( + 'git push -f -v {} HEAD:refs/heads/{}' + .format(GIT_REMOTE_NAME, branch_name) + ) + + +def release_get_next_version3digits(old_version, feature=True, patch=False): + """Backward compat for old 3-digits versionins. + + TODO: trash it as we move all projects to 5 digits. + """ + warning = ( + 'You are still using OLD 3-digits versioning. ' + 'Please, consider moving to new versioning w/ 5 digits.' + ) + print() + print('!' * len(warning)) + print(warning) + print('!' 
* len(warning)) + print() + try: + version = StrictVersion(old_version).version + except ValueError: + exit_msg("'{}' is not a valid version".format(old_version)) + if feature: + new_version = (version[0], version[1] + 1, 0) + elif patch: + new_version = (version[0], version[1], version[2] + 1) + return '.'.join([str(v) for v in new_version]) + + +def release_get_next_version( + old_version, major=False, feature=True, patch=False): + if len(old_version.split('.')) == 3: + if major: + # not supported here + feature = True + return release_get_next_version3digits( + old_version, feature=feature, patch=patch) + try: + version = MarabuntaVersion(old_version).version + except ValueError: + exit_msg("'{}' is not a valid version".format(old_version)) + if major: + new_version = list(version[:2]) + [version[2] + 1, 0, 0] + elif feature: + new_version = list(version[:-2]) + [version[-2] + 1, 0] + elif patch: + new_version = list(version[:-1]) + [version[-1] + 1] + return '.'.join([str(v) for v in new_version]) + + +@task +def bump(ctx, major=False, feature=False, patch=False, print_only=False): + """ Increase the version number where needed """ + if not (major or feature or patch): + exit_msg("should be a --major or --feature or a --patch version") + old_version = current_version() + if not old_version: + exit_msg("the version file is empty") + + version = release_get_next_version( + old_version, major=major, feature=feature, patch=patch) + + + print('Increasing version number from {} ' + 'to {}...'.format(old_version, version)) + print() + if print_only: + exit_msg('PRINT ONLY mode on. 
Exiting...') + + try: + ctx.run(r'grep --quiet --regexp "- version:.*{}" {}'.format( + version, + MIGRATION_FILE + )) + except exceptions.Failure: + with open(MIGRATION_FILE, 'a') as fd: + fd.write(' - version: {}\n'.format(version)) + + with open(VERSION_FILE, 'w') as fd: + fd.write(version + '\n') + + new_version_index = None + for index, line in enumerate(fileinput.input(HISTORY_FILE, inplace=True)): + # Weak heuristic to find where we should write the new version + # header, anyway, it will need manual editing to have a proper + # changelog + if 'unreleased' in line.lower(): + # place the new header 2 lines after because we have the + # underlining + new_version_index = index + 2 + if index == new_version_index: + today = date.today().strftime('%Y-%m-%d') + new_version_header = "{} ({})".format(version, today) + print("\n**Features and Improvements**\n\n" + "**Bugfixes**\n\n" + "**Build**\n\n" + "**Documentation**\n\n\n" + "{}\n" + "{}".format(new_version_header, + '+' * len(new_version_header))) + + print(line, end='') + + push_branches(ctx, force=True) + + print() + print('** Version changed to {} **'.format(version)) + print() + print('Please continue with the release by:') + print() + print(' * Cleaning HISTORY.rst. Remove the empty sections, empty lines...') + print(' * Check the diff then run:') + print(' git add ... 
# pick the files ') + print(' git commit -m"Release {}"'.format(version)) + print(' git tag -a {} ' + '# optionally -s to sign the tag'.format(version)) + print(' # copy-paste the content of the release from HISTORY.rst' + ' in the annotation of the tag') + print(' git push --tags && git push') \ No newline at end of file diff --git a/tasks/requirements.txt b/tasks/requirements.txt new file mode 100644 index 0000000..9d7dd17 --- /dev/null +++ b/tasks/requirements.txt @@ -0,0 +1,13 @@ +cookiecutter +future +git-aggregator +invoke +pyyaml +requests +ruamel.yaml +gnupg +psycopg2 + +# marabunta new versioning +marabunta >= 0.9.0 + diff --git a/tasks/songs.py b/tasks/songs.py new file mode 100644 index 0000000..7418cd8 --- /dev/null +++ b/tasks/songs.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +# This file has been generated with 'invoke project.sync'. +# Do not modify. Any manual change will be lost. +# Copyright 2016 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) +import io +import zipfile + +try: + # Python 2 + from urlparse import urlparse +except ImportError: + # Python 3 + from urllib.parse import urlparse +try: + import requests +except ImportError: + print('Please install `requests`') + +from invoke import task +from .common import exit_msg + + +def odoo_login(base_url, login, password, db): + """ Get a session_id from Odoo """ + url = "%s/web/session/authenticate" % base_url + + data = { + 'jsonrpc': '2.0', + 'params': { + 'context': {}, + 'db': db, + 'login': login, + 'password': password, + }, + } + + headers = { + 'Content-type': 'application/json' + } + + resp = requests.post(url, json=data, headers=headers) + r_data = resp.json() + return r_data['result']['session_id'] + + +@task(name='rip') +def rip(ctx, location, login='admin', password='admin', + db='odoodb', dryrun=False, data_path='./odoo/data'): + """Open or download a zipfile containing songs. + + Unzip and copy the files into current project path. 
+ + :param location: compilation URL or file path + :param login: odoo username required if location is an URL + :param password: odoo username password required if location is an URL + :param odoodb: odoo database required if location is an URL + :param dry_run: just print the compilation content + do not add files to project. + """ + if not location: + exit_msg( + "You must provide a value for --location\n" + "It can be an url or a local path\n\n" + "invoke songs.rip /tmp/songs.zip\n" + "invoke songs.rip " + "http://project:8888/dj/download/compilation/account-default-1") + zipdata = None + # download file from url + if location.startswith('http'): + url = urlparse(location) + base_url = "%s://%s" % (url.scheme, url.netloc) + session_id = odoo_login(base_url, login, password, db) + cookies = { + "session_id": session_id, + } + resp = requests.get(location, cookies=cookies) + resp.raise_for_status() + zipdata = io.BytesIO() + zipdata.write(resp.content) + else: + zipdata = location + handle_zip_data(zipdata, dryrun=dryrun, data_path=data_path) + + +def handle_zip_data(zipdata, dryrun=False, data_path='./odoo/data'): + if dryrun: + print("Dry-run mode activated: no file will be extracted.") + zf = zipfile.ZipFile(zipdata) + + # Unzip file and push files at the right path + readme_path = None + for path in zf.namelist(): + if dryrun: + print(path) + # Ignore dj metadata zip file + if path.endswith('zip'): + continue + if 'DEV_README.rst' in path: + readme_path = path + else: + if not dryrun: + dest_path = data_path[:] + if path.startswith('songs'): + # TODO: we assume songs path + # is on the same level of data path + dest_path = '/'.join(data_path.split('/')[:2]) + print("Extracting %s/%s" % (dest_path, path)) + zf.extract(path, dest_path) + + print('-' * 79) + # Print README file + readme_content = zf.open(readme_path).read() + readme_content = readme_content.decode('utf-8') + print(readme_content) diff --git a/tasks/submodule.py b/tasks/submodule.py new file mode 
100644 index 0000000..7ef48b5 --- /dev/null +++ b/tasks/submodule.py @@ -0,0 +1,233 @@ +# Copyright 2017 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) +from __future__ import print_function + +import logging +import re +import os +from itertools import chain + +from invoke import task, exceptions +try: + import git_aggregator.config + import git_aggregator.main + import git_aggregator.repo +except ImportError: + print('Please install git-aggregator') + +from .common import ( + ask_or_abort, + build_path, + cookiecutter_context, + cd, + exit_msg, + get_aggregator_repo, + get_aggregator_repositories, + root_path, +) + +BRANCH_EXCLUDE = """ +branches: + except: + - /^merge-branch-.*$/ +""" + + +def get_target_branch(ctx, target_branch=None): + """Gets the branch to push on and checks if we're overriding something. + + If target_branch is given only checks for the override. + Otherwise create the branch name and check for the override. + """ + current_branch = ctx.run( + 'git symbolic-ref --short HEAD', hide=True).stdout.strip() + project_id = cookiecutter_context()['project_id'] + if not target_branch: + commit = ctx.run('git rev-parse HEAD', hide=True).stdout.strip()[:8] + target_branch = 'merge-branch-{}-{}-{}'.format( + project_id, current_branch, commit) + if current_branch == 'master' or re.match(r'\d{1,2}.\d', target_branch): + ask_or_abort('You are on branch {}.' + ' Please confirm override of target branch {}'.format( + current_branch, target_branch + )) + return target_branch + + +@task +def init(ctx): + """ Add git submodules read in the .gitmodules files + + Allow to edit the .gitmodules file, add all the repositories and + run the command once to add all the submodules. + + It means less 'git submodule add -b ... 
{url} {path}' commands to run + + """ + gitmodules = build_path('.gitmodules') + res = ctx.run(r"git config -f %s --get-regexp '^submodule\..*\.path$'" % + gitmodules, hide=True) + odoo_version = cookiecutter_context()['odoo_version'] + with cd(root_path()): + for line in res.stdout.splitlines(): + path_key, path = line.split() + url_key = path_key.replace('.path', '.url') + url = ctx.run('git config -f %s --get "%s"' % + (gitmodules, url_key), hide=True).stdout + try: + ctx.run('git submodule add -b %s %s %s' % + (odoo_version, url.strip(), path.strip())) + except exceptions.Failure: + pass + + print("Submodules added") + print() + print("You can now update odoo/Dockerfile with this addons-path:") + print() + list(ctx) + + +@task(help={ + 'dockerfile': 'With --no-dockerfile, the raw paths are listed instead ' + 'of the Dockerfile format' +}) +def list(ctx, dockerfile=True): + """ list git submodules paths + + It can be used to directly copy-paste the addons paths in the Dockerfile. + The order depends of the order in the .gitmodules file. + + """ + gitmodules = build_path('.gitmodules') + res = ctx.run( + "git config --file %s " + "--get-regexp path | awk '{ print $2 }' " % gitmodules, + hide=True, + ) + content = res.stdout + if dockerfile: + blacklist = {'odoo/src'} + lines = (line for line in content.splitlines() + if line not in blacklist) + lines = chain(lines, ['odoo/src/addons', 'odoo/local-src']) + lines = ("/%s" % line for line in lines) + template = ( + "ENV ADDONS_PATH=\"%s\" \\\n" + ) + print(template % (', \\\n'.join(lines))) + else: + print(content) + + +@task +def merges(ctx, submodule_path, push=True, target_branch=None): + """ Regenerate a pending branch for a submodule + + It reads pending-merges.yaml, runs gitaggregator on the submodule and + pushes the new branch on + camptocamp/merge-branch-- + + By default, the branch is pushed on the camptocamp remote, but you + can disable the push with ``--no-push``. + + Example: + 1. 
Run: git checkout -b my-new-feature-branch + 2. Add pending-merge in odoo/pending-merges.yaml + 3. Run: invoke submodule.merges odoo/external-src/sale-workflow + 4. Run: git add odoo/pending-merges.yaml odoo/external-src/sale-workflow + 5. Run: git commit -m"add PR #XX in sale-workflow" + 6. Create pull request for inclusion in master branch + + Beware, if you changed the remote of the submodule, you still need + to edit it manually in the ``.gitmodules`` file. + """ + git_aggregator.main.setup_logger() + repo = get_aggregator_repo(submodule_path) + target_branch = get_target_branch(ctx, target_branch) + + print('Building and pushing to camptocamp/{}'.format(target_branch)) + print() + repo.cwd = build_path(submodule_path) + repo.target['branch'] = target_branch + repo.aggregate() + + process_travis_file(ctx, repo) + if push: + repo.push() + + +@task +def push(ctx, submodule_path, target_branch=None): + """Push a Submodule + + Pushes the current state of your submodule to the target remote and branch + either given by you or specified in pending-merges.yml + """ + git_aggregator.main.setup_logger() + repo = get_aggregator_repo(submodule_path) + target_branch = get_target_branch(ctx, target_branch) + print('Pushing to {}/{}'.format(repo.target['remote'], target_branch)) + print() + repo.cwd = build_path(submodule_path) + repo.target['branch'] = target_branch + with cd(submodule_path): + repo._switch_to_branch(target_branch) + process_travis_file(ctx, repo) + repo.push() + + +def process_travis_file(ctx, repo): + tf = '.travis.yml' + with cd(repo.cwd): + if not os.path.exists(tf): + print(repo.cwd + tf, + 'does not exists. 
Skipping travis exclude commit') + return + + print("Writing exclude branch option in {}".format(tf)) + with open(tf, 'a') as travis: + travis.write(BRANCH_EXCLUDE) + + cmd = 'git commit {} -m "Travis: exclude new branch from build"' + commit = ctx.run(cmd.format(tf), hide=True) + print("Committed as:\n{}".format(commit.stdout.strip())) + + +@task +def show_closed_prs(ctx, submodule_path='all'): + """Show all closed pull requests in pending merges. + + Pass nothing to check all submodules. + Pass `-s path/to/submodule` to check specific ones. + """ + git_aggregator.main.setup_logger() + logging.getLogger('requests').setLevel(logging.ERROR) + if submodule_path == 'all': + repositories = get_aggregator_repositories() + else: + repositories = [get_aggregator_repo(submodule_path)] + if not repositories: + exit_msg('No repo to check.') + try: + for repo in repositories: + print('Checking', repo.cwd) + repo.show_closed_prs() + except AttributeError: + print('You need to upgrade git-aggregator.' 
+ ' This function is available since 1.2.0.') + + +@task +def update(ctx, submodule_path=None): + """Synchronize and update given submodule path + + :param submodule_path: submodule path for a precise sync & update + """ + sync_cmd = 'git submodule sync' + update_cmd = 'git submodule update --init' + if submodule_path is not None: + sync_cmd += ' -- {}'.format(submodule_path) + update_cmd += ' -- {}'.format(submodule_path) + with cd(root_path()): + ctx.run(sync_cmd) + ctx.run(update_cmd) diff --git a/tasks/translate.py b/tasks/translate.py new file mode 100644 index 0000000..feb72c1 --- /dev/null +++ b/tasks/translate.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) +from __future__ import print_function + +import glob +import os + +from invoke import task + +from .common import build_path + + +@task(default=True) +def generate(ctx, addon_path, update_po=True): + """ Generate pot template and merge it in language files + + Example: + + $ invoke translate.generate odoo/local-src/my_module + """ + dbname = 'tmp_generate_pot' + addon = addon_path.strip('/').split('/')[-1] + assert os.path.exists(build_path(addon_path)), "%s not found" % addon_path + container_path = os.path.join('/', addon_path, 'i18n') + i18n_dir = os.path.join(build_path(addon_path), 'i18n') + if not os.path.exists(i18n_dir): + os.mkdir(i18n_dir) + container_po_path = os.path.join(container_path, '%s.po' % addon) + user_id = ctx.run('id --user', hide='both').stdout.strip() + cmd = ('docker-compose run --rm -e LOCAL_USER_ID=%(user)s ' + '-e DEMO=False -e MIGRATE=False odoo odoo ' + '--log-level=warn --workers=0 ' + '--database %(dbname)s --i18n-export=%(path)s ' + '--modules=%(addon)s --stop-after-init --without-demo=all ' + '--init=%(addon)s') % {'user': user_id, 'path': container_po_path, + 'dbname': dbname, 'addon': addon} + ctx.run(cmd) + + ctx.run('docker-compose run --rm -e PGPASSWORD=odoo odoo ' + 
'dropdb %s -U odoo -h db' % dbname) + + # mv .po to .pot + source = os.path.join(i18n_dir, '%s.po' % addon) + pot_file = source + 't' + # dirty hack to remove duplicated entries for paths + ctx.run('mv %s %s' % (source, pot_file)) + ctx.run('sed -i "/local-src\|external-src/d" %(pot)s' % + {'pot': pot_file, }) + + if update_po: + for po_file in glob.glob('%s/*.po' % i18n_dir): + ctx.run('msgmerge %(po)s %(pot)s -o %(po)s' % + {'po': po_file, 'pot': pot_file}) + # dirty hack to remove duplicated entries for paths + ctx.run('sed -i "/local-src\|external-src/d" %(po)s' % + {'po': po_file, }) + print('%s.pot generated' % addon) diff --git a/test.yml b/test.yml new file mode 100644 index 0000000..81b5e7e --- /dev/null +++ b/test.yml @@ -0,0 +1,16 @@ +# For testers +# This file should be called with +# docker-compose -f docker-compose.yml -f test.yml up + +version: '2' +services: + odoo: + image: camptocamp/geo_11_odoo:latest + environment: + - MARABUNTA_MODE=sample # could be 'migration' to run the migration + - MARABUNTA_ALLOW_SERIE=True # should not be set in production + + nginx: + ports: + - 80:80 + # TODO: 443 diff --git a/travis/docker-compose.yml b/travis/docker-compose.yml new file mode 100644 index 0000000..f923bce --- /dev/null +++ b/travis/docker-compose.yml @@ -0,0 +1,34 @@ +# docker-compose file used by travis +# docker-compose.override.yml is automatically used by docker-compose when no +# option -f is provided + +version: '2' +services: + odoo: + build: ../odoo/ + depends_on: + - db + volumes: + - "data-odoo:/data/odoo" + - "$HOME/.cachedb:/.cachedb" + environment: + DB_USER: odoo + DB_PASSWORD: odoo + DB_NAME: odoodb + ADMIN_PASSWD: admin + RUNNING_ENV: dev + ODOO_CLOUD_PLATFORM_UNSAFE: 1 + CI: 'True' + + db: + image: camptocamp/postgres:9.5 + command: -c shared_buffers=256MB -c maintenance_work_mem=256MB -c wal_buffers=8MB -c effective_cache_size=1024MB + environment: + POSTGRES_USER: odoo + POSTGRES_PASSWORD: odoo + volumes: + - 
"data-db:/var/lib/postgresql/data" + +volumes: + data-odoo: + data-db: diff --git a/travis/git_submodule_update.py b/travis/git_submodule_update.py new file mode 100755 index 0000000..d81d5cc --- /dev/null +++ b/travis/git_submodule_update.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Download submodules from Github zip archive url +# Keep standard update form private repositories +# listed in `travis/private_repo` +# +import os +import shutil +import urllib2 +import yaml +import zipfile + +from git import Repo + +https_proxy = os.environ.get('https_proxy') +if https_proxy: + proxy = urllib2.ProxyHandler({'https': https_proxy}) + opener = urllib2.build_opener(proxy) + urllib2.install_opener(opener) + +DL_DIR = 'download' +ZIP_PATH = '%s/submodule.zip' % DL_DIR + +if os.path.exists(DL_DIR): # clean if previous build failed + shutil.rmtree(DL_DIR) +os.makedirs(DL_DIR) + +with open('travis/private_repos') as f: + private_repos = f.read() + +os.system('git submodule init') + +submodules = Repo('.').submodules + + +def git_url(url): + """ Change an url to https and ending without .git all in lower case + This to reuse it for archive download and to make it comparable. + """ + url = url.lower() + if url.startswith('git@github.com:'): + url = url.replace('git@github.com:', 'https://github.com/') + # remove .git + if url.endswith('.git'): + url = url[:-4] + return url + + +# Check consitancy between .gitmodules and pending-merges.yaml +with open('odoo/pending-merges.yaml') as pending_yml: + pending_merges = yaml.safe_load(pending_yml) or [] + +for sub in submodules: + # replace odoo/ by ./ + pending_path = "." 
+ sub.path[4:] + if pending_path in pending_merges: + pending = pending_merges[pending_path] + target = pending['target'].split()[0] + target_remote = pending['remotes'][target] + assert git_url(target_remote) == git_url(sub.url.lower()), ( + "In .gitmodules %s :\n" + " remote url %s does not match \n" + " target url %s \n" + "in pending-merges.yaml\n" + "\n" + "If you added pending merges entries you probably forgot to edit" + " target in .gitmodules file to match the fork repository\n" + "or if your intent is to clean up entries in pending-merges.yaml" + " something went wrong in that file" + ) % (sub.path, target_remote, sub.url) + + +for sub in submodules: + print "Getting submodule %s" % sub.path + use_archive = sub.path not in private_repos + if use_archive: + url = git_url(sub.url) + archive_url = "%s/archive/%s.zip" % (url, sub.hexsha) + request = urllib2.Request(archive_url) + with open(ZIP_PATH, 'wb') as f: + f.write(urllib2.urlopen(request).read()) + try: + with zipfile.ZipFile(ZIP_PATH) as zf: + zf.extractall(DL_DIR) + except zipfile.BadZipfile: + # fall back to standard download + use_archive = False + with open(ZIP_PATH) as f: + print ("Getting archive failed with error %s. Falling back to " + "git clone." % f.read()) + os.remove(ZIP_PATH) + except Exception as e: + use_archive = False + print ("Getting archive failed with error %s. Falling back to " + "git clone." 
% e.message) + else: + os.remove(ZIP_PATH) + os.removedirs(sub.path) + submodule_dir = os.listdir(DL_DIR)[0] + shutil.move(os.path.join(DL_DIR, submodule_dir), sub.path) + if not use_archive: + os.system('git submodule update %s' % sub.path) diff --git a/travis/minion-client.py b/travis/minion-client.py new file mode 100755 index 0000000..eaf476e --- /dev/null +++ b/travis/minion-client.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright 2017 Camptocamp SA +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) + +from __future__ import print_function + +import argparse +import os + +import requests + + +parser = argparse.ArgumentParser() +parser.add_argument('docker_tag', type=str, + help="Docker Tag") +parser.add_argument('minion_server', type=str, + help="Rancher Minion Server") +parser.add_argument('authorization_token', type=str, + help="Minion Server Authorization Token") +parser.add_argument('files', type=str, nargs='+', + help="Repeat the argument for the different files. 
" + "Files needed: 'docker-compose.yml', " + "'rancher-compose.yml', 'rancher.list'") + +args = parser.parse_args() + +docker_tag = args.docker_tag +url = args.minion_server +token = args.authorization_token +files = args.files + +if not url.startswith('https://'): + print('https:// is mandatory') + exit(1) + +files_content = { + os.path.basename(path): open(os.path.abspath(path), 'rb') + for path in files +} + +payload = { + 'docker_tag': docker_tag, + 'branch': os.environ.get('TRAVIS_BRANCH'), + 'commit': os.environ.get('TRAVIS_COMMIT'), + 'commit_message': os.environ.get('TRAVIS_COMMIT_MESSAGE'), + 'build_id': os.environ.get('TRAVIS_BUILD_ID'), +} + +response = requests.post( + url + '/new', + data=payload, + files=files_content, + headers={'Authorization': token} +) +print(response.status_code) +response.raise_for_status() diff --git a/travis/minion-files/docker-compose.yml b/travis/minion-files/docker-compose.yml new file mode 100644 index 0000000..e2acd39 --- /dev/null +++ b/travis/minion-files/docker-compose.yml @@ -0,0 +1,55 @@ +version: '2' +services: + + odoo: + image: camptocamp/geo_11_odoo:${DOCKER_TAG} + command: "odoo --load=web,web_kanban,attachment_s3,session_redis,logging_json" + links: + - db:db + environment: + - DB_USER=${DB_USER} + - DB_NAME=${DB_NAME} + - DB_PORT=${DB_PORT} + - RUNNING_ENV=${RUNNING_ENV} + - DEMO=${DEMO} + - WORKERS=${WORKERS} + - MAX_CRON_THREADS=${MAX_CRON_THREADS} + - LOG_LEVEL=${LOG_LEVEL} + - LOG_HANDLER=${LOG_HANDLER} + - DB_MAXCONN=${DB_MAXCONN} + - LIMIT_MEMORY_SOFT=${LIMIT_MEMORY_SOFT} + - LIMIT_MEMORY_HARD=${LIMIT_MEMORY_HARD} + - LIMIT_REQUEST=${LIMIT_REQUEST} + - LIMIT_TIME_CPU=${LIMIT_TIME_CPU} + - LIMIT_TIME_REAL=${LIMIT_TIME_REAL} + - MARABUNTA_ALLOW_SERIE=True + - MARABUNTA_MODE=${MARABUNTA_MODE} + - ODOO_SESSION_REDIS=${ODOO_SESSION_REDIS} + - ODOO_SESSION_REDIS_HOST=${ODOO_SESSION_REDIS_HOST} + - ODOO_SESSION_REDIS_EXPIRATION=${ODOO_SESSION_REDIS_EXPIRATION} + - ODOO_LOGGING_JSON=${ODOO_LOGGING_JSON} + - 
ODOO_CLOUD_PLATFORM_UNSAFE=${ODOO_CLOUD_PLATFORM_UNSAFE} + # configured by rancher-minion + - ADMIN_PASSWD=${ADMIN_PASSWD} + - DB_PASSWORD=${DB_PASSWORD} + - ODOO_BASE_URL=${ODOO_BASE_URL} + - ODOO_SESSION_REDIS_PREFIX=${ODOO_SESSION_REDIS_PREFIX} + labels: + io.rancher.sidekicks: nginx + odoo.publish: true + + + nginx: + image: camptocamp/odoo-nginx:10.0-1.1.0 + network_mode: "container:odoo" + environment: + - NGX_ODOO_HOST=127.0.0.1 + links: + - odoo:odoo + + + db: + image: postgres:9.6 + environment: + - POSTGRES_USER=${DB_USER} + - POSTGRES_PASSWORD=${DB_PASSWORD} diff --git a/travis/minion-files/rancher-compose.yml b/travis/minion-files/rancher-compose.yml new file mode 100644 index 0000000..9a8c753 --- /dev/null +++ b/travis/minion-files/rancher-compose.yml @@ -0,0 +1,8 @@ +version: '2' +services: + + odoo: + lb_config: + port_rules: + - target_port: 80 + hostname: ${DOMAIN_NAME} diff --git a/travis/minion-files/rancher.list b/travis/minion-files/rancher.list new file mode 100644 index 0000000..847c826 --- /dev/null +++ b/travis/minion-files/rancher.list @@ -0,0 +1,26 @@ +DB_USER=odoo +DB_NAME=odoodb +DB_PORT=5432 + +RUNNING_ENV=test +WORKERS=2 +MAX_CRON_THREADS=1 +LOG_LEVEL=info +LOG_HANDLER=":INFO" +DB_MAXCONN=5 +LIMIT_MEMORY_SOFT=629145600 +LIMIT_MEMORY_HARD=1572864000 +LIMIT_TIME_CPU=86400 +LIMIT_TIME_REAL=86400 +LIMIT_REQUEST=8192 +DEMO=False +MARABUNTA_MODE=sample + +ODOO_SESSION_REDIS=1 +ODOO_SESSION_REDIS_HOST=redis.redis +ODOO_SESSION_REDIS_EXPIRATION=86400 + +ODOO_LOGGING_JSON=1 + +# activated, platform checks are not performed, use for debug +ODOO_CLOUD_PLATFORM_UNSAFE=0 diff --git a/travis/private_repos b/travis/private_repos new file mode 100644 index 0000000..d5c8878 --- /dev/null +++ b/travis/private_repos @@ -0,0 +1,2 @@ +odoo/external-src/enterprise +odoo/external-src/odoo-cloud-platform diff --git a/travis/publish.sh b/travis/publish.sh new file mode 100755 index 0000000..d252efa --- /dev/null +++ b/travis/publish.sh @@ -0,0 +1,45 @@ 
+#!/bin/bash -e + +local_dir="$(dirname "$0")" + +function deploy { + local tag=$1 + + echo "Pushing image to docker hub ${DOCKER_HUB_REPO}:${tag}" + docker tag ${GENERATED_IMAGE} ${DOCKER_HUB_REPO}:${tag} + docker push ${DOCKER_HUB_REPO}:${tag} + + echo "Creating a minion for ${tag} on ${TRAVIS_BRANCH}" + $local_dir/minion-client.py \ + ${tag} \ + ${RANCHER_MINION_SERVER} \ + ${minion_server_token} \ + $local_dir/minion-files/docker-compose.yml \ + $local_dir/minion-files/rancher-compose.yml \ + $local_dir/minion-files/rancher.list + +} + +if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then + docker login --username="$DOCKER_USERNAME" --password="$DOCKER_PASSWORD" + docker_tag=r-$TRAVIS_BRANCH-$TRAVIS_COMMIT + + if [ "$TRAVIS_BRANCH" == "master" ]; then + echo "Pushing image to docker hub ${DOCKER_HUB_REPO}:latest" + docker tag ${GENERATED_IMAGE} ${DOCKER_HUB_REPO}:latest + docker push "${DOCKER_HUB_REPO}:latest" + + deploy ${docker_tag} + + elif [ ! -z "$TRAVIS_TAG" ]; then + echo "Pushing image to docker hub ${DOCKER_HUB_REPO}:${TRAVIS_TAG}" + docker tag ${GENERATED_IMAGE} ${DOCKER_HUB_REPO}:${TRAVIS_TAG} + docker push "${DOCKER_HUB_REPO}:${TRAVIS_TAG}" + + elif [ ! -z "$TRAVIS_BRANCH" ]; then + deploy ${docker_tag} + + else + echo "Not deploying image" + fi +fi diff --git a/travis/pylintrc b/travis/pylintrc new file mode 100644 index 0000000..a3cad70 --- /dev/null +++ b/travis/pylintrc @@ -0,0 +1,260 @@ +######################## + +# Configuration for pylint +# This used by pylint https://docs.pylint.org/en/1.9/technical_reference/features.html +# flaouvred with the OCA's pylint-odoo https://github.com/OCA/pylint-odoo#pylint-odoo-plugin + +######################## + +[MASTER] + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Profiled execution. +profile=no + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Pickle collected data for later comparisons. 
+persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins=pylint_odoo + + +[MESSAGES CONTROL] +# first one are standard pylint errors +disable=missing-docstring, + too-few-public-methods, + import-error, + protected-access, # Easy to solve. But with our actual codestyle, it may cause a lot of false positive + wrong-import-order, # I don't like this one. Can we ignore it? + bad-continuation, + duplicate-code # Could be ignored only on __openerp__.py files + manifest-required-author, # OCA + website-manifest-key-not-valid-uri, # OCA + manifest-deprecated-key, # OCA + attribute-string-redundant, # OCA + missing-readme, # OCA + missing-return, # OCA + odoo-addons-relative-import, # OCA + + +[REPORTS] + +output-format=colorized +files-output=no + +# Tells whether to display a full report or only the messages +reports=no + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Add a comment according to your evaluation note. This is used by the global +# evaluation report (RP0004). +comment=no + +# Template used to display messages. This is a python new-style format string +# used to format the massage information. 
See doc for all details +msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg} + + +[BASIC] + +# Required attributes for module, separated by a comma +required-attributes= + +# List of builtins function names that should not be used, separated by a comma +bad-functions=map,filter,apply,input + +# Regular expression which should only match correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression which should only match correct module level names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression which should only match correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression which should only match correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct instance attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct variable names +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct attribute names in class +# bodies +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Regular expression which should only match correct list comprehension / +# generator expression variable names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=__.*__ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. 
+docstring-min-length=-1 + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the beginning of the name of dummy variables +# (i.e. not used). +dummy-variables-rgx=_|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=4 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=79 + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of classes names for which member attributes should not be checked +# (useful for classes with attributes dynamically set). +ignored-classes=SQLObject + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,TERMIOS,Bastion,rexec + +# Create a graph of every (i.e. 
internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.* + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of statements in function / method body +max-statements=50 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception" +overgeneral-exceptions=Exception