diff --git a/.ddev/commands/.gitattributes b/.ddev/commands/.gitattributes new file mode 100755 index 00000000..0d33f23b --- /dev/null +++ b/.ddev/commands/.gitattributes @@ -0,0 +1,5 @@ +# #ddev-generated +# Everything in the commands directory needs LF line-endings +# Not CRLF as from Windows. +# bash especially just can't cope if it finds CRLF in a script. +* -text eol=lf diff --git a/.ddev/commands/db/README.txt b/.ddev/commands/db/README.txt new file mode 100755 index 00000000..1688b76f --- /dev/null +++ b/.ddev/commands/db/README.txt @@ -0,0 +1,5 @@ +#ddev-generated +Scripts in this directory will be executed inside the db +container. A number of environment variables are supplied to the scripts. + +See https://ddev.readthedocs.io/en/stable/users/extend/custom-commands/#environment-variables-provided for a list of environment variables. diff --git a/.ddev/commands/host/README.txt b/.ddev/commands/host/README.txt new file mode 100755 index 00000000..f1204f64 --- /dev/null +++ b/.ddev/commands/host/README.txt @@ -0,0 +1,6 @@ +#ddev-generated +Scripts in this directory will be executed on the host +but they can easily take action on containers by using +`ddev exec`. + +See https://ddev.readthedocs.io/en/stable/users/extend/custom-commands/#environment-variables-provided for a list of environment variables that can be used in the scripts. 
diff --git a/.ddev/commands/host/solrtail.example b/.ddev/commands/host/solrtail.example new file mode 100755 index 00000000..4cf1b9c6 --- /dev/null +++ b/.ddev/commands/host/solrtail.example @@ -0,0 +1,11 @@ +#!/bin/bash + +## #ddev-generated +## Description: Tail the main solr log +## Usage: solrtail +## Example: ddev solrtail + +# This can't work unless you have a solr service, +# See https://ddev.readthedocs.io/en/stable/users/extend/additional-services/ + +ddev exec -s solr tail -40lf /opt/solr/server/logs/solr.log diff --git a/.ddev/commands/solr/README.txt b/.ddev/commands/solr/README.txt new file mode 100755 index 00000000..8777b61e --- /dev/null +++ b/.ddev/commands/solr/README.txt @@ -0,0 +1,15 @@ +#ddev-generated +Scripts in this directory will be executed inside the solr +container (if it exists, of course). This is just an example, +but any named service can have a directory with commands. + +Note that /mnt/ddev_config must be mounted into the 3rd-party service +with a stanza like this in the docker-compose.solr.yaml: + + volumes: + - type: "bind" + source: "." + target: "/mnt/ddev_config" + + +See https://ddev.readthedocs.io/en/stable/users/extend/custom-commands/#environment-variables-provided for a list of environment variables that can be used in the scripts. diff --git a/.ddev/commands/solr/solrtail.example b/.ddev/commands/solr/solrtail.example new file mode 100755 index 00000000..309a07a7 --- /dev/null +++ b/.ddev/commands/solr/solrtail.example @@ -0,0 +1,13 @@ +#!/bin/bash + +## #ddev-generated +## Description: Tail the main solr log +## Usage: solrtail +## Example: ddev solrtail + +# This example runs inside the solr container. +# Note that this requires that /mnt/ddev_config be mounted +# into the solr container and of course that you have a container +# named solr. 
+ +tail -f /opt/solr/server/logs/solr.log diff --git a/.ddev/commands/web/README.txt b/.ddev/commands/web/README.txt new file mode 100755 index 00000000..e0c4a052 --- /dev/null +++ b/.ddev/commands/web/README.txt @@ -0,0 +1,4 @@ +#ddev-generated +Scripts in this directory will be executed inside the web container. + +See https://ddev.readthedocs.io/en/stable/users/extend/custom-commands/#environment-variables-provided for a list of environment variables that can be used in the scripts. diff --git a/.ddev/homeadditions/README.txt b/.ddev/homeadditions/README.txt new file mode 100755 index 00000000..2dec556e --- /dev/null +++ b/.ddev/homeadditions/README.txt @@ -0,0 +1,7 @@ +#ddev-generated +Files in .ddev/homeadditions will be copied into the web container's home directory. + +An example bash_aliases.example is provided here. To make this file active you can either + +cp bash_aliases.example .bash_aliases +or ln -s bash_aliases.example .bash_aliases diff --git a/.ddev/homeadditions/bash_aliases.example b/.ddev/homeadditions/bash_aliases.example new file mode 100755 index 00000000..85a41ed0 --- /dev/null +++ b/.ddev/homeadditions/bash_aliases.example @@ -0,0 +1,6 @@ +# #ddev-generated +# To make this file active you can either +# cp bash_aliases.example .bash_aliases +# or ln -s bash_aliases.example .bash_aliases + +alias ll="ls -lhA" diff --git a/.ddev/providers/README.txt b/.ddev/providers/README.txt new file mode 100755 index 00000000..82974758 --- /dev/null +++ b/.ddev/providers/README.txt @@ -0,0 +1,34 @@ +Providers README +================ + +#ddev-generated + +## Introduction to Hosting Provider Integration + +DDEV's hosting provider integration lets you integrate with any upstream source of database dumps and files (such as your production or staging server) and provides examples of configuration for Acquia, Platform.sh, Pantheon, rsync, etc. 
+ +The best part of this is you can change them and adapt them in any way you need to, they're all short scripted recipes. There are several example recipes created in the .ddev/providers directory of every project or see them in the code at https://github.com/ddev/ddev/tree/master/cmd/ddev/cmd/dotddev_assets/providers. + +ddev provides the `pull` command with whatever recipes you have configured. For example, `ddev pull acquia` if you have created `.ddev/providers/acquia.yaml`. + +ddev also provides the `push` command to push database and files to upstream. This is very dangerous to your upstream site and should only be used with extreme caution. It's recommended not even to implement the push stanzas in your yaml file, but if it fits your workflow, use it well. + +Each provider recipe is a yaml file that can be named any way you want to name it. The examples are mostly named after the hosting providers, but they could be named "upstream.yaml" or "live.yaml", so you could `ddev pull upstream` or `ddev pull live`. If you wanted different upstream environments to pull from, you could name one "prod" and one "dev" and `ddev pull prod` and `ddev pull dev`. + +Several example recipes are at https://github.com/ddev/ddev/tree/master/cmd/ddev/cmd/dotddev_assets/providers and in this directory. + +Each provider recipe is a file named `<provider>.yaml` and consists of several mostly-optional stanzas: + +* `environment_variables`: Environment variables will be created in the web container for each of these during pull or push operations. They're used to provide context (project id, environment name, etc.) for each of the other stanzas. +* `db_pull_command`: A script that determines how ddev should pull a database. Its job is to create a gzipped database dump in /var/www/html/.ddev/.downloads/db.sql.gz. +* `files_pull_command`: A script that determines how ddev can get user-generated files from upstream. 
Its job is to copy the files from upstream to /var/www/html/.ddev/.downloads/files. +* `db_push_command`: A script that determines how ddev should push a database. Its job is to take a gzipped database dump from /var/www/html/.ddev/.downloads/db.sql.gz and load it on the hosting provider. +* `files_push_command`: A script that determines how ddev should push user-generated files to upstream. Its job is to copy the files from the project's user-files directory ($DDEV_FILES_DIR) to the correct place on the upstream provider. + +The environment variables provided to custom commands (see https://ddev.readthedocs.io/en/stable/users/extend/custom-commands/#environment-variables-provided) are also available for use in these recipes. + +### Provider Debugging + +You can uncomment the `set -x` in each stanza to see more of what's going on. It really helps. + +Although the various commands could be executed on the host or in other containers if configured that way, most commands are executed in the web container. So the best thing to do is to `ddev ssh` and manually execute each command you want to use. When you have it right, use it in the yaml file. diff --git a/.ddev/providers/acquia.yaml.example b/.ddev/providers/acquia.yaml.example new file mode 100755 index 00000000..707873ad --- /dev/null +++ b/.ddev/providers/acquia.yaml.example @@ -0,0 +1,82 @@ +#ddev-generated +# Example Acquia provider configuration. + +# To use this configuration, + +# 1. Get your Acquia API token from your Account Settings->API Tokens. +# 2. Make sure your ssh key is authorized on your Acquia account at Account Settings->SSH Keys +# 3. `ddev auth ssh` (this typically needs only be done once per ddev session, not every pull.) +# 4. Add / update the web_environment section in ~/.ddev/global_config.yaml with the API keys: +# ```yaml +# web_environment: +# - ACQUIA_API_KEY=xxxxxxxx +# - ACQUIA_API_SECRET=xxxxx +# ``` +# 5. Copy .ddev/providers/acquia.yaml.example to .ddev/providers/acquia.yaml. +# 6. 
Update the project_id and database corresponding to the environment you want to work with. +# - If you have acli installed, you can use the following command: `acli remote:aliases:list` +# - Or, on the Acquia Cloud Platform navigate to the environments page, click on the header and look for the "SSH URL" line. Eg. `project1.dev@cool-projects.acquia-sites.com` would have a project ID of `project1.dev` +# 7. Your project must include drush; `ddev composer require drush/drush` if it isn't there already. +# 8. `ddev restart` +# 9. Use `ddev pull acquia` to pull the project database and files. +# 10. Optionally use `ddev push acquia` to push local files and database to Acquia. Note that `ddev push` is a command that can potentially damage your production site, so this is not recommended. + +# Debugging: Use `ddev exec acli command` and `ddev exec acli auth:login` +# Make sure you remembered to `ddev auth ssh` + +environment_variables: + project_id: yourproject.dev + database_name: yourproject + +auth_command: + command: | + set -eu -o pipefail + if [ -z "${ACQUIA_API_KEY:-}" ] || [ -z "${ACQUIA_API_SECRET:-}" ]; then echo "Please make sure you have set ACQUIA_API_KEY and ACQUIA_API_SECRET in ~/.ddev/global_config.yaml" && exit 1; fi + if ! command -v drush >/dev/null ; then echo "Please make sure your project contains drush, ddev composer require drush/drush" && exit 1; fi + ssh-add -l >/dev/null || ( echo "Please 'ddev auth ssh' before running this command." 
&& exit 1 ) + + acli -n auth:login -n --key="${ACQUIA_API_KEY}" --secret="${ACQUIA_API_SECRET}" + acli -n remote:aliases:download --all --destination-dir $HOME/.drush -n >/dev/null + +db_pull_command: + command: | + #set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + # If no database_name is configured, infer it from project_id + if [ -z "${database_name:-}" ]; then database_name=${project_id%%.*}; fi + backup_time=$(acli -n api:environments:database-backup-list ${project_id} ${database_name} --limit=1 | jq -r .[].completed_at) + backup_id="$(acli -n api:environments:database-backup-list ${project_id} ${database_name} --limit=1 | jq -r .[].id)" + backup_url=$(acli -n api:environments:database-backup-download ${project_id} ${database_name} ${backup_id} | jq -r .url) + ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible + echo "Downloading backup $backup_id from $backup_time" + curl -o /var/www/html/.ddev/.downloads/db.sql.gz ${backup_url} + +files_pull_command: + command: | + # set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible + pushd /var/www/html/.ddev/.downloads >/dev/null; + drush -r docroot rsync --exclude-paths='styles:css:js' --alias-path=~/.drush -q -y @${project_id}:%files ./files + +# push is a dangerous command. If not absolutely needed it's better to delete these lines. 
+db_push_command: + command: | + set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + TIMESTAMP=$(date +%y%m%d%H%M%S) + ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible + cd /var/www/html/.ddev/.downloads + drush rsync -y --alias-path=~/.drush ./db.sql.gz @${project_id}:/tmp/db.${TIMESTAMP}.sql.gz + acli -n remote:ssh -n ${project_id} -- "cd /tmp && gunzip db.${TIMESTAMP}.sql.gz" + acli -n remote:drush -n ${project_id} -- "sql-cli </tmp/db.${TIMESTAMP}.sql" + +# push is a dangerous command. If not absolutely needed it's better to delete these lines. +files_push_command: + command: | + set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + ls ${DDEV_FILES_DIR} >/dev/null # This just refreshes stale NFS if possible + drush rsync -y --alias-path=~/.drush @self:%files @${project_id}:%files diff --git a/.ddev/providers/git.yaml.example b/.ddev/providers/git.yaml.example new file mode 100755 index 00000000..d0e42da3 --- /dev/null +++ b/.ddev/providers/git.yaml.example @@ -0,0 +1,40 @@ +#ddev-generated +# Example git provider configuration. + +# To use this configuration, + +# 1. Create a git repository that contains a database dump (db.sql.gz) and a files tarball. It can be private or public, but for most people they will be private. +# 2. Configure access to the repository so that it can be accessed from where you need it. For example, on gitpod, you'll need to enable access to GitHub or Gitlab. On a regular local dev environment, you'll need to be able to access github via https or ssh. +# 3. Update the environment_variables below to point to the git repository that contains your database dump and files. + +environment_variables: + project_url: https://github.com/ddev/ddev-pull-git-test-repo + branch: main + checkout_dir: ~/tmp/ddev-pull-git-test-repo + + +auth_command: + service: host + # This actually doesn't auth, but rather just checks out the repository + command: | + set -eu -o pipefail + if [ ! 
-d ${checkout_dir}/.git ] ; then + git clone -q ${project_url} --branch=${branch} ${checkout_dir} + else + cd ${checkout_dir} + git reset --hard -q && git fetch && git checkout -q origin/${branch} + fi + +db_import_command: + service: host + command: | + set -eu -o pipefail + # set -x + ddev import-db --src="${checkout_dir}/db.sql.gz" + +files_import_command: + service: host + command: | + set -eu -o pipefail + # set -x + ddev import-files --src="${checkout_dir}/files" diff --git a/.ddev/providers/localfile.yaml.example b/.ddev/providers/localfile.yaml.example new file mode 100755 index 00000000..d8b9daad --- /dev/null +++ b/.ddev/providers/localfile.yaml.example @@ -0,0 +1,38 @@ +#ddev-generated +# Example local file provider configuration. + +# This will pull a database and files from an existing location, for example, +# from a Dropbox location on disk + +# To use this configuration, +# 1. You need a database dump and/or user-generated files tarball. +# 2. Copy localfile.yaml.example to localfile.yaml. +# 3. Change the copy commands as needed. +# 4. Use `ddev pull localfile` to pull the project database and files. 
+ +# In this example, db_pull_command is not used + +# Here db_import_command imports directly from the source location +# instead of looking in .ddev/.downloads/files +db_import_command: + command: | + set -eu -o pipefail + echo $PATH + ddev --version + set -x + gzip -dc ~/Dropbox/db.sql.gz | ddev mysql db + service: host + +# In this example, files_pull_command is not used + +# files_import_command is an example of a custom importer +# that directly untars the files into their appropriate destination +files_import_command: + command: | + set -eu -o pipefail + echo $PATH + ddev --version + set -x + mkdir -p web/sites/default/files + tar -zxf ~/Dropbox/files.tar.gz -C web/sites/default/files + service: host diff --git a/.ddev/providers/pantheon.yaml.example b/.ddev/providers/pantheon.yaml.example new file mode 100755 index 00000000..2b6f805f --- /dev/null +++ b/.ddev/providers/pantheon.yaml.example @@ -0,0 +1,88 @@ +#ddev-generated +# Example Pantheon.io provider configuration. +# This example is Drupal/drush oriented, +# but can be adapted for other CMSs supported by Pantheon + +# To use this configuration: +# +# 1. Get your Pantheon.io machine token: +# a. Login to your Pantheon Dashboard, and [Generate a Machine Token](https://pantheon.io/docs/machine-tokens/) for ddev to use. +# b. Add the API token to the `web_environment` section in your global ddev configuration at ~/.ddev/global_config.yaml +# +# ``` +# web_environment: +# - TERMINUS_MACHINE_TOKEN=abcdeyourtoken +# ``` +# +# 2. Choose a Pantheon site and environment you want to use with ddev. You can usually use the site name, but in some environments you may need the site uuid, which is the long 3rd component of your site dashboard URL. So if the site dashboard is at https://dashboard.pantheon.io/sites/009a2cda-2c22-4eee-8f9d-96f017321555#dev/, the site ID is 009a2cda-2c22-4eee-8f9d-96f017321555. +# +# 3. On the pantheon dashboard, make sure that at least one backup has been created. (When you need to refresh what you pull, do a new backup.) +# +# 4. 
Make sure your public ssh key is configured in Pantheon (Account->SSH Keys) +# +# 5. Check out project codebase from Pantheon. Enable the "Git Connection Mode" and use `git clone` to check out the code locally. +# +# 6. Configure the local checkout for ddev using `ddev config` +# +# 7. Verify that drush is installed in your project, `ddev composer require drush/drush` +# +# 8. In your project's .ddev/providers directory, copy pantheon.yaml.example to pantheon.yaml and edit the "project" under `environment_variables` (change it from `yourproject.dev`). If you want to use a different environment than "dev", change `dev` to the name of the environment. +# +# 9. If using Colima, may need to set an explicit nameserver in `~/.colima/default/colima.yaml` like `1.1.1.1`. If this configuration is changed, may also need to restart Colima. +# +# 10. `ddev restart` +# +# 11. Run `ddev pull pantheon`. The ddev environment will download the Pantheon database and files using terminus and will import the database and files into the ddev environment. You should now be able to access the project locally. +# +# 12. Optionally use `ddev push pantheon` to push local files and database to Pantheon. Note that `ddev push` is a command that can potentially damage your production site, so this is not recommended. +# + +# Debugging: Use `ddev exec terminus auth:whoami` to see what terminus knows about +# `ddev exec terminus site:list` will show available sites + +environment_variables: + project: yourproject.dev + +auth_command: + command: | + set -eu -o pipefail + ssh-add -l >/dev/null || ( echo "Please 'ddev auth ssh' before running this command." && exit 1 ) + if ! 
command -v drush >/dev/null ; then echo "Please make sure your project contains drush, ddev composer require drush/drush" && exit 1; fi + if [ -z "${TERMINUS_MACHINE_TOKEN:-}" ]; then echo "Please make sure you have set TERMINUS_MACHINE_TOKEN in ~/.ddev/global_config.yaml" && exit 1; fi + terminus auth:login --machine-token="${TERMINUS_MACHINE_TOKEN}" || ( echo "terminus auth login failed, check your TERMINUS_MACHINE_TOKEN" && exit 1 ) + terminus aliases 2>/dev/null + +db_pull_command: + command: | + set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible + pushd /var/www/html/.ddev/.downloads >/dev/null + terminus backup:get ${project} --element=db --to=db.sql.gz + +files_pull_command: + command: | + set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible + pushd /var/www/html/.ddev/.downloads >/dev/null; + terminus backup:get ${project} --element=files --to=files.tgz + mkdir -p files && tar --strip-components=1 -C files -zxf files.tgz + +# push is a dangerous command. If not absolutely needed it's better to delete these lines. +db_push_command: + command: | + set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible + pushd /var/www/html/.ddev/.downloads >/dev/null; + terminus remote:drush ${project} -- sql-drop -y + gzip -dc db.sql.gz | terminus remote:drush ${project} -- sql-cli + +# push is a dangerous command. If not absolutely needed it's better to delete these lines. 
+files_push_command: + command: | + set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + ls ${DDEV_FILES_DIR} >/dev/null # This just refreshes stale NFS if possible + drush rsync -y @self:%files @${project}:%files diff --git a/.ddev/providers/platform.yaml b/.ddev/providers/platform.yaml new file mode 100755 index 00000000..f7820f4b --- /dev/null +++ b/.ddev/providers/platform.yaml @@ -0,0 +1,105 @@ +#ddev-generated +# Example Platform.sh provider configuration. + +# Consider using `ddev get ddev/ddev-platformsh` (https://github.com/ddev/ddev-platformsh) for more +# complete platform integration. + +# To use this configuration, + +# 1. Check out the site from platform.sh and then configure it with `ddev config`. You'll want to use `ddev start` and make sure the basic functionality is working. +# 2. Obtain and configure an API token. +# a. Login to the Platform.sh Dashboard and go to Account->API Tokens to create an API token for ddev to use. +# b. Add the API token to the `web_environment` section in your global ddev configuration at ~/.ddev/global_config.yaml: +# ```yaml +# web_environment: +# - PLATFORMSH_CLI_TOKEN=abcdeyourtoken +# ``` +# 3. Add PLATFORM_PROJECT and PLATFORM_ENVIRONMENT variables to your project `.ddev/config.yaml` or a `.ddev/config.platform.yaml` +# ```yaml +# web_environment: +# - PLATFORM_PROJECT=nf4amudfn23biyourproject +# - PLATFORM_ENVIRONMENT=main +# 4. `ddev restart` +# 5. Run `ddev pull platform`. After you agree to the prompt, the current upstream database and files will be downloaded. +# 6. Optionally use `ddev push platform` to push local files and database to platform.sh. Note that `ddev push` is a command that can potentially damage your production site, so this is not recommended. + +# If you have more than one database on your Platform.sh project, +# you will likely to choose which one you want to use +# as the primary database ('db'). 
+# Do this by setting PLATFORM_PRIMARY_RELATIONSHIP, for example, `ddev config --web-environment-add=PLATFORM_PRIMARY_RELATIONSHIP=main` +# or run `ddev pull platform` with the environment variable, for example +# `ddev pull platform -y --environment=PLATFORM_PRIMARY_RELATIONSHIP=main` +# If you need to change this `platform.yaml` recipe, you can change it to suit your needs, but remember to remove the "ddev-generated" line from the top. + +# Debugging: Use `ddev exec platform` to see what platform.sh knows about +# your configuration and whether it's working correctly. + +auth_command: + command: | + set -eu -o pipefail + if [ -z "${PLATFORMSH_CLI_TOKEN:-}" ]; then echo "Please make sure you have set PLATFORMSH_CLI_TOKEN." && exit 1; fi + if [ -z "${PLATFORM_PROJECT:-}" ]; then echo "Please make sure you have set PLATFORM_PROJECT." && exit 1; fi + if [ -z "${PLATFORM_ENVIRONMENT:-}" ]; then echo "Please make sure you have set PLATFORM_ENVIRONMENT." && exit 1; fi + +db_pull_command: + command: | + # set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + export PLATFORMSH_CLI_NO_INTERACTION=1 + ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible + # /tmp/db_relationships.yaml is the full yaml output of the database relationships + db_relationships_file=/tmp/db_relationships.yaml + PLATFORM_RELATIONSHIPS="" platform relationships -y -e "${PLATFORM_ENVIRONMENT}" | yq 'with_entries(select(.[][].type == "mariadb:*" or .[][].type == "*mysql:*" or .[][].type == "postgresql:*")) ' >${db_relationships_file} + db_relationships=($(yq ' keys | .[] ' ${db_relationships_file})) + db_names=($(yq '.[][].path' ${db_relationships_file})) + db_count=${#db_relationships[@]} + # echo "db_relationships=${db_relationships} sizeof db_relationships=${#db_relationships[@]} db_names=${db_names} db_count=${db_count} PLATFORM_PRIMARY_RELATIONSHIP=${PLATFORM_PRIMARY_RELATIONSHIP}" + # If we have only one database, import it into local 
database named 'db' + if [ ${#db_names[@]} -eq 1 ]; then db_names[0]="db"; fi + + for (( i=0; i<${#db_relationships[@]}; i++ )); do + db_name=${db_names[$i]} + rel=${db_relationships[$i]} + # if PLATFORM_PRIMARY_RELATIONSHIP is set, then when doing that one, import it into local database 'db' + if [ "${rel}" = "${PLATFORM_PRIMARY_RELATIONSHIP:-notset}" ] ; then + echo "PLATFORM_PRIMARY_RELATIONSHIP=${PLATFORM_PRIMARY_RELATIONSHIP:-} so using it as database 'db' instead of the upstream '${db_name}'" + db_name="db" + fi + + platform db:dump --yes --relationship=${rel} --gzip --file=/var/www/html/.ddev/.downloads/${db_name}.sql.gz --project="${PLATFORM_PROJECT:-setme}" --environment="${PLATFORM_ENVIRONMENT:-setme}" + done + echo "Downloaded db dumps for databases '${db_names[@]}'" + +files_import_command: + command: | + #set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + export PLATFORMSH_CLI_NO_INTERACTION=1 + # Use $PLATFORM_MOUNTS if it exists to get list of mounts to download, otherwise just web/sites/default/files (drupal) + declare -a mounts=(${PLATFORM_MOUNTS:-/web/sites/default/files}) + platform mount:download --all --yes --quiet --project="${PLATFORM_PROJECT}" --environment="${PLATFORM_ENVIRONMENT}" --target=/var/www/html + + +# push is a dangerous command. If not absolutely needed it's better to delete these lines. +db_push_command: + command: | + # set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + export PLATFORMSH_CLI_NO_INTERACTION=1 + ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible + pushd /var/www/html/.ddev/.downloads >/dev/null; + if [ "${PLATFORM_PRIMARY_RELATIONSHIP:-}" != "" ] ; then + rel="--relationship ${PLATFORM_PRIMARY_RELATIONSHIP}" + fi + gzip -dc db.sql.gz | platform db:sql --project="${PLATFORM_PROJECT}" ${rel:-} --environment="${PLATFORM_ENVIRONMENT}" + +# push is a dangerous command. 
If not absolutely needed it's better to delete these lines. +# TODO: This is a naive, Drupal-centric push, which needs adjustment for the mount to be pushed. +files_push_command: + command: | + # set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + export PLATFORMSH_CLI_NO_INTERACTION=1 + ls "${DDEV_FILES_DIR}" >/dev/null # This just refreshes stale NFS if possible + platform mount:upload --yes --quiet --project="${PLATFORM_PROJECT}" --environment="${PLATFORM_ENVIRONMENT}" --source="${DDEV_FILES_DIR}" --mount=web/sites/default/files + diff --git a/.ddev/providers/rsync.yaml.example b/.ddev/providers/rsync.yaml.example new file mode 100755 index 00000000..62420064 --- /dev/null +++ b/.ddev/providers/rsync.yaml.example @@ -0,0 +1,64 @@ +#ddev-generated +# Example rsync provider configuration. + +# This will pull a database and files from a network location, for example, +# server or other jumphost. It operates inside the web container and uses +# ssh, so you need to `ddev auth ssh` first. + +# To use this configuration, +# +# 1. You need a database dump and/or user-generated files tarball that you +# have access to somewhere on the internet +# 2. Copy rsync.yaml.example to rsync.yaml (or name it as you see fit) +# 3. `ddev auth ssh` (only needs to be done once per ddev session or reboot) +# 4. Use `ddev pull rsync` to pull the project database and files. +# 5. `ddev push rsync` can push the project database and files + +# Note that while this is done in the web container (because rsync will always be there) +# it could also be done on the host, and then you wouldn't need the +# `ddev auth ssh` + +environment_variables: + dburl: you@yourhost.example.com:tmp/db.sql.gz + filesurl: you@yourhost.example.com:tmp/files.tar.gz + +auth_command: + command: | + set -eu -o pipefail + ssh-add -l >/dev/null || ( echo "Please 'ddev auth ssh' before running this command." 
&& exit 1 ) + +db_pull_command: + command: | + # set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + rsync -az "${dburl}" /var/www/html/.ddev/.downloads + service: web + +files_pull_command: + command: | + # set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible + pushd /var/www/html/.ddev/.downloads >/dev/null + rm -f files.tar.gz + rsync -avz "${filesurl}" . + tar -xzf files.tar.gz -C files/ + service: web + +# Pushing a database or files to upstream can be dangerous and not recommended. +# This example is not very dangerous because it's not actually deploying the +# files. But if the db were deployed on production it would overwrite +# the current db or files there. +db_push_command: + command: | + # set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible + mysqldump db | gzip >/var/www/html/.ddev/.downloads/db_push.sql.gz + rsync -avz /var/www/html/.ddev/.downloads/db_push.sql.gz "${dburl}" + +files_push_command: + command: | + # set -x # You can enable bash debugging output by uncommenting + set -eu -o pipefail + rsync -az "${DDEV_FILES_DIR}/" "${filesurl}/" diff --git a/.ddev/xhprof/xhprof_prepend.php b/.ddev/xhprof/xhprof_prepend.php new file mode 100755 index 00000000..e80cbac5 --- /dev/null +++ b/.ddev/xhprof/xhprof_prepend.php @@ -0,0 +1,52 @@ +save_run($xhprof_data, $appNamespace); + + // Uncomment to append profile link to the page (and remove the ddev generated first line) + // append_profile_link($run_id, $appNamespace); +} + +// If invoked, this will append a profile link to the output HTML +// This works on some CMSs, like Drupal 7. 
It does not work on Drupal8/9 +// and can have unwanted side-effects on TYPO3 +function append_profile_link($run_id, $appNamespace) +{ + $base_link = (isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] === 'on' ? "https" : "http") . "://$_SERVER[HTTP_HOST]/xhprof/"; + + $profiler_url = sprintf('%sindex.php?run=%s&source=%s', $base_link, $run_id, $appNamespace); + echo ''; +} diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a0a82f4..9aabc229 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -69,6 +69,36 @@ ### Removed - Removed built-in support for the Verbb Comments plugin, which provides its own Feed Me driver. +## 4.6.0 - 2023-03-16 + +- Added the “Set Empty Values” feed setting, which determines whether empty values in the feed should be respected or ignored. ([#1228](https://github.com/craftcms/feed-me/pull/1228), [#797](https://github.com/craftcms/feed-me/issues/797), [#723](https://github.com/craftcms/feed-me/issues/723), [#854](https://github.com/craftcms/feed-me/issues/854), [#680](https://github.com/craftcms/feed-me/issues/680)) +- Disabled elements are no longer redundantly re-disabled, drastically improving the performance of some feed imports. ([#1248](https://github.com/craftcms/feed-me/pull/1248), [#1241](https://github.com/craftcms/feed-me/issues/1241)) +- Fixed a bug where some feed element data would be considered changed even if there were no changes. ([#1220](https://github.com/craftcms/feed-me/pull/1220), [#1219](https://github.com/craftcms/feed-me/issues/1219), [#1223](https://github.com/craftcms/feed-me/pull/1223/), [#1219](https://github.com/craftcms/feed-me/issues/1219)) +- Fixed a bug where the default value modal for relational fields on the feed mapping page would show all available sources, not just the sources allowed for the field. ([#1234](https://github.com/craftcms/feed-me/pull/1234)) +- Fixed a PHP error that could occur when a feed contained an empty value that was mapped to an Assets field. 
([#1229](https://github.com/craftcms/feed-me/pull/1229), [#1195](https://github.com/craftcms/feed-me/issues/1195), [#1106](https://github.com/craftcms/feed-me/issues/1106), [#1154](https://github.com/craftcms/feed-me/issues/1154)) +- Fixed a bug where empty arrays could be misinterpreted during feed imports. ([#1236](https://github.com/craftcms/feed-me/pull/1236)) +- Fixed several issues related to importing categories and Structure section entries. ([#1240](https://github.com/craftcms/feed-me/pull/1240), [#1154](https://github.com/craftcms/feed-me/issues/1154)) +- Fixed a PHP error that could occur when importing relational field data within a Matrix field. ([#1069](https://github.com/craftcms/feed-me/issues/1069)) +- Fixed a PHP error that occurred when importing an asset with a filename over 255 characters long. + +## 4.5.4 - 2023-01-09 + +### Fixed +- Fixed a PHP error that could occur when using the `--continue-on-error` flag on the `feed-me/feeds/queue` CLI command. +- Fixed a bug where sites in a Site Group would all have their statuses updated when a feed was targeting a single site. ([#1208](https://github.com/craftcms/feed-me/issues/1208)) +- Fixed importing using the LinkIt plugin. ([#1203](https://github.com/craftcms/feed-me/issues/1203)) + +## 4.5.3 - 2022-05-31 + +### Fixed +- Fixed a PHP error that could occur when importing a base64-encoded asset. +- Fixed a bug where asset file names were getting normalized before searching for an existing asset when the feed specified a file path. ([#847](https://github.com/craftcms/feed-me/issues/847)) + +## 4.5.2 - 2022-05-17 + +### Changed +- The `EVENT_AFTER_PARSE_FEED` event now passes in the feed’s ID. 
([#1107](https://github.com/craftcms/feed-me/issues/1107)) + ## 4.5.1 - 2022-05-05 ### Fixed @@ -77,7 +107,7 @@ ## 4.5.0 - 2022-04-21 ### Changed -- Entries imported within sections whose Propagation Method is set to “Let each entry choose which sites it should be saved to” are no longer created for all of the section's supported sites. ([#1084](https://github.com/craftcms/feed-me/issues/1084)) +- Entries imported within sections whose Propagation Method is set to “Let each entry choose which sites it should be saved to” are no longer created for all of the section’s supported sites. ([#1084](https://github.com/craftcms/feed-me/issues/1084)) ## 4.4.3 - 2022-04-06 @@ -95,7 +125,7 @@ ### Fixed - Fixed a bug that would prevent relating products in Products field. ([#1058](https://github.com/craftcms/feed-me/issues/1058)) - Fixed a bug that would prevent changing element status on multisite when the target site was set to default. ([#606](https://github.com/craftcms/feed-me/issues/606)) -- Fixed a bug where mapping fields for an entry fieldtype wasn't showing the correct available fields. ([#1098](https://github.com/craftcms/feed-me/pull/1098), ([#692](https://github.com/craftcms/feed-me/issues/692)), ([#825](https://github.com/craftcms/feed-me/issues/825)) +- Fixed a bug where mapping fields for an entry fieldtype wasn’t showing the correct available fields. ([#1098](https://github.com/craftcms/feed-me/pull/1098), ([#692](https://github.com/craftcms/feed-me/issues/692)), ([#825](https://github.com/craftcms/feed-me/issues/825)) ## 4.4.1.1 - 2021-12-06 @@ -368,7 +398,7 @@ ## 3.1.12 - 2019-03-03 ### Fixed -- Ensure all complex fields don't process when none of their sub-fields are mapped. +- Ensure all complex fields don’t process when none of their sub-fields are mapped. ## 3.1.11 - 2019-03-02 @@ -376,7 +406,7 @@ - Added config option to run Garbage Collection before a feed starts. 
### Fixed -- Ensure complex fields (Matrix, etc) don't process when none of their sub-fields are mapped. +- Ensure complex fields (Matrix, etc) don’t process when none of their sub-fields are mapped. ## 3.1.10 - 2019-02-26 @@ -466,14 +496,14 @@ ## 3.1.1 - 2019-02-01 ### Added -- Updates to asset element importing, including "URL or Path" field. +- Updates to asset element importing, including “URL or Path” field. - Added docs guide for asset element importing (finally, right). ### Changed - Add some more clarity around errors in help requests. - Update processing events to be cancelable and modify params. - Upgrades to nesbot/carbon ^2.10. -- Allow `getSelectOptions()` to modify the ‘none’ option. +- Allow `getSelectOptions()` to modify the `none` option. - Alphabetise help feeds. ### Fixed @@ -658,7 +688,7 @@ - Fix element fields in Matrix not mapping correctly. - Fix Twig parsing in default and feed data too early, resulting in empty values. - Matrix - fix block types with no fields messing things up. -- Fix ‘placeholder’ in products query causing PostgreSQL errors. +- Fix `placeholder` in products query causing PostgreSQL errors. - Fix error thrown on entry mapping screen when no sections are available. - Assets - fix filename matches not including subfolders. - Table - protect against array values importing into fields. @@ -690,7 +720,7 @@ ### Changed - Ensure element fields don’t throw fatal errors when unable to save - allowing owner element to continue. - Products - remove required attribute on unlimited stock. -- Change element field matching existing elements querying. Fixes the case where trying to match elements with the keyword 'not' in the value. +- Change element field matching existing elements querying. Fixes the case where trying to match elements with the keyword `not` in the value. ### Fixed - Fix primary element iterator when only one item in feed (in some cases). 
@@ -721,7 +751,7 @@ - Use registerTwigExtension(), otherwise may cause Twig to be loaded before it should be (thanks @brandonkelly) - Entry - Fix authors not being created when they should be. - CSV - fix for line breaks in headings causing issues. -- Fix for variants in that they can potentially move to another product type, or otherwise plucked from a product other than the one you're importing. +- Fix for variants in that they can potentially move to another product type, or otherwise plucked from a product other than the one you’re importing. - Fix incorrect variant custom field namespace. ## 3.0.0-beta.17 - 2018-07-19 @@ -835,7 +865,7 @@ - Fix matching existing elements with special characters - Improve handling of remote asset handling when `HEAD` requests fail - Fix help widget -- Improve date-helper to handle ‘0’ +- Improve date-helper to handle `0` - Table - ensure dates are parsed ## 3.0.0-beta.9 - 2018-04-28 @@ -913,7 +943,7 @@ ## 3.0.0-beta.1 - 2018-04-03 -> {warning} Feed Me 3.0.0 is a major release with significant, breaking changes. Be sure to back up your existing Feed Me 2.x.x settings. In most cases, you'll be required to re-map fields to your feed data, as this has been heavily changed and improved. +> {warning} Feed Me 3.0.0 is a major release with significant, breaking changes. Be sure to back up your existing Feed Me 2.x.x settings. In most cases, you’ll be required to re-map fields to your feed data, as this has been heavily changed and improved. ### Added - Support for Craft 3, and all that comes with it. @@ -932,11 +962,11 @@ - Better handling of default field values under the hood. - Lots of smaller improvements pathing the way to more major changes in 3.1.0. - Better support for uploading remote assets (a little faster too). -- When running from the command line (curl, wget, etc), feed processing will wait until it's finished before ending the request. 
+- When running from the command line (curl, wget, etc), feed processing will wait until it’s finished before ending the request. - Improved UI for mapping complex fields (Matrix, Table). Includes setting collapsed state for Matrix fields. - Improved UI for mapping sub-element fields. Fields are hidden by default and can be toggled to provide better visual clarity. -- Improved UI for logs, also shows log 'type' to easily pick up errors. -- When a feed fails or contains errors it will no longer show the red error symbol for the queue job. Errors are recorded in the logs, but it won't cause other queue jobs to prevent being run, and notifying clients of an error. +- Improved UI for logs, also shows log `type` to easily pick up errors. +- When a feed fails or contains errors it will no longer show the red error symbol for the queue job. Errors are recorded in the logs, but it won’t cause other queue jobs to prevent being run, and notifying clients of an error. - Logs now capture additional information of line/file when exceptions occur. - utilise Guzzle for all HTTP requests, rather than new instances of Curl. - Improved Help widget to utilise API for sending emails, not relying on local relay. Caused immeasurable amount of issues for people try to get support! @@ -992,7 +1022,7 @@ - Date - Add date formatting options for date field. - Ensure each run of the feed uses a fresh criteria model. - Matrix - improvements for Super Table handling. -- Add extra truthy detections for ‘live’ and ‘enabled’. +- Add extra truthy detections for `live` and `enabled`. ### Fixed - Load custom (fixed) version of selectize to fix being unable to un-select defaults. @@ -1004,7 +1034,7 @@ - Fix regex for short-hand twig detection. - Fix for Table field not processing more than 10 rows. - Ensure more than just plain sub-element field are styed correctly. -- Elements - Ensure we properly escape ‘{‘ characters, and don’t assume they’re short-hand twig. 
+- Elements - Ensure we properly escape `{` characters, and don’t assume they’re short-hand twig. - Entries fieldtype - don’t rely on parent element being a section. - Assets - Fix folder-mapping from 2.0.7 (requires re-mapping). - Support for limitAutoSlugsToAscii for element slugs. @@ -1110,7 +1140,7 @@ ### Changed - Refactor remote asset uploading/handling. - Remote assets - Better support for dynamic paths set in asset fields (ie `{slug}`). -- Remote assets - When set to `Keep Original`, don't download it and then check if it exists in Craft - it can be skipped. +- Remote assets - When set to `Keep Original`, don’t download it and then check if it exists in Craft - it can be skipped. - Ensure all fields are bootstrapped with the owner element being imported. - Improve Commerce Products matching on existing elements (including better variant-field support). - Remove certain unique identifier options for Product Variants - the element criteria doesn’t support them anyway. @@ -1169,7 +1199,7 @@ - Added support for Categories, Users, Entries, Commerce Products - Support for third-party element types - Auto-upload Assets when mapping -- Support to map content to element's inner fields (think fields for assets) +- Support to map content to element’s inner fields (think fields for assets) - Added Assets ElementType - easily upload assets into Craft. - Direct access to mapping screen - Support attribute-mapping for XML feeds @@ -1178,9 +1208,9 @@ - Added debug icon to Feeds index. - Added Element ID mapping. *A word of warning - * do not use this for importing new data. [Read more](https://sgroup.com.au/plugins/feedme/feature-tour/field-mapping#element-iDs). - Added parent-mapping for Entry and Category. -- Elements can be created if they don't exist +- Elements can be created if they don’t exist - Assets can be uploaded automatically, with options for conflict handling -- Added support to map element fields' own custom fields (think fields for assets). 
Currently only supports simple fields. +- Added support to map element fields’ own custom fields (think fields for assets). Currently only supports simple fields. - Support for importing Categories, Users, Entries, Commerce Products - Support for third-party Element Types using `registerFeedMeElementTypes` - Added `onBeforeProcessFeed`, `onProcessFeed`, and `onStepProcessFeed` events @@ -1204,9 +1234,9 @@ - Feed mapping now looks at entire feed structure for nodes, rather than just the first node - Feed mapping is no longer case-insensitive - Proper confirmation screen when importing, with progress bar -- Feeds no longer die when an error occurs. It'll try to complete the rest of the feed, notifying you of errors at the end of processing. +- Feeds no longer die when an error occurs. It’ll try to complete the rest of the feed, notifying you of errors at the end of processing. - Sub-folders are now searched for existing assets. -- Improved handling of inconsistent, repeatable nodes (I'm looking at you XML). +- Improved handling of inconsistent, repeatable nodes (I’m looking at you XML). - Asterisks are now shown during mapping for required fields - a handy reminder. - User importing no longer requires a User Group set. - More modular handling by moving to separate classes @@ -1220,9 +1250,9 @@ - Remove database logging (no longer used) - Fix support for local feeds - Feed no longer lags when processing from the control panel -- Fix issue where task wouldn't fire asynchronously, locking up the CP -- Fixed issue where pending/disabled existing entries weren't being matched for updating/deleting -- Prevent feed from processing if there are no nodes to process. Fixes deletion when elements shouldn't be +- Fix issue where task wouldn’t fire asynchronously, locking up the CP +- Fixed issue where pending/disabled existing entries weren’t being matched for updating/deleting +- Prevent feed from processing if there are no nodes to process. 
Fixes deletion when elements shouldn’t be - Treat boolean-like values with the respect they deserve. - Added Shipping Category for Commerce Products. - Fixes to Help requests not validating - therefore unable to send @@ -1250,7 +1280,7 @@ ### Changed - FeedUrl attribute stored as `TEXT` column type to allow for longer URLs. -- Improved JSON parsing to use Yii's JsonHelper class, with better logging when failing. +- Improved JSON parsing to use Yii’s JsonHelper class, with better logging when failing. ## 1.4.9 - 2016-03-15 @@ -1342,7 +1372,7 @@ ## 1.3.3 - 2015-11-25 -- Minor fix for log reporting which wasn't displaying errors in a useful way. +- Minor fix for log reporting which wasn’t displaying errors in a useful way. ## 1.3.2 - 2015-11-25 @@ -1354,7 +1384,7 @@ ## 1.3.0 - 2015-11-25 -- Refactored direct processing to utalize Craft's tasks service, rather than using pure PHP processing. This greatly improves performance as PHP processing would run out of memory extremely quickly. +- Refactored direct processing to utilize Craft’s tasks service, rather than using pure PHP processing. This greatly improves performance as PHP processing would run out of memory extremely quickly. ## 1.2.9 - 2015-11-25 @@ -1363,7 +1393,7 @@ - Added help tab, allowing users to submit their feed info and setup for debugging/troubleshooting. - Fix for fields in Matrix blocks only succesfully mapping textual fields. Complex fields such as Assets, Entries, etc were not mapping correctly. - Fix for only one item being processed when Delete duplication handling was selected. -- Fix for Dropdown/RadioButtons causing a critical error when a provided value didn't exist in the field. +- Fix for Dropdown/RadioButtons causing a critical error when a provided value didn’t exist in the field. - Added credit and plugin url to footer. ## 1.2.8 - 2015-11-25 @@ -1391,7 +1421,7 @@ - Refactoring for performance improvements. - Remove database logging until a better performing option is figured out. 
Logging still occurs to the file system under `craft/storage/runtime/logs/`. - Added optional backup option per-feed (default to true). -- Minor fix so direct feed link doesn't use `siteUrl`. +- Minor fix so direct feed link doesn’t use `siteUrl`. ## 1.2.4 - 2015-11-25 @@ -1400,7 +1430,7 @@ ## 1.2.3 - 2015-11-25 - Primary Element is no longer required - important for JSON feeds. -- Fixes for when no primary element specified. It's pretty much optional now. +- Fixes for when no primary element specified. It’s pretty much optional now. - UI tidy for mapping table. - Fix for duplication handling not matching in some cases. Now uses element search. diff --git a/composer.lock b/composer.lock index 1c837623..b1a2e937 100644 --- a/composer.lock +++ b/composer.lock @@ -793,26 +793,26 @@ }, { "name": "craftcms/cms", - "version": "4.3.10", + "version": "4.4.3", "source": { "type": "git", "url": "https://github.com/craftcms/cms.git", - "reference": "af40a2d6e0557f6a600a22f29f1660639cd11b81" + "reference": "d1edaf139b2245dda24fe172a80ec58c5c1a1174" }, "dist": { "type": "zip", - "url": "https://api.github.com/repos/craftcms/cms/zipball/af40a2d6e0557f6a600a22f29f1660639cd11b81", - "reference": "af40a2d6e0557f6a600a22f29f1660639cd11b81", + "url": "https://api.github.com/repos/craftcms/cms/zipball/d1edaf139b2245dda24fe172a80ec58c5c1a1174", + "reference": "d1edaf139b2245dda24fe172a80ec58c5c1a1174", "shasum": "" }, "require": { "commerceguys/addressing": "^1.2", "composer/composer": "2.2.19", "craftcms/oauth2-craftid": "~1.0.0", - "craftcms/plugin-installer": "~1.5.6", + "craftcms/plugin-installer": "~1.6.0", "craftcms/server-check": "~2.1.2", "creocoder/yii2-nested-sets": "~0.9.0", - "elvanto/litemoji": "^3.0.1", + "elvanto/litemoji": "^4.3.0", "enshrined/svg-sanitize": "~0.15.0", "ext-bcmath": "*", "ext-curl": "*", @@ -868,7 +868,8 @@ "fakerphp/faker": "^1.19.0", "league/factory-muffin": "^3.3.0", "phpstan/phpstan": "^1.8.5", - "vlucas/phpdotenv": "^5.4.1" + "vlucas/phpdotenv": 
"^5.4.1", + "yiisoft/yii2-redis": "^2.0" }, "suggest": { "ext-exif": "Adds support for parsing image EXIF data.", @@ -907,7 +908,7 @@ "rss": "https://github.com/craftcms/cms/releases.atom", "source": "https://github.com/craftcms/cms" }, - "time": "2023-02-17T22:32:58+00:00" + "time": "2023-03-16T18:21:00+00:00" }, { "name": "craftcms/oauth2-craftid", @@ -966,16 +967,16 @@ }, { "name": "craftcms/plugin-installer", - "version": "1.5.7", + "version": "1.6.0", "source": { "type": "git", "url": "https://github.com/craftcms/plugin-installer.git", - "reference": "23ec472acd4410b70b07d5a02b2b82db9ee3f66b" + "reference": "bd1650e8da6d5ca7a8527068d3e51c34bc7b6b4f" }, "dist": { "type": "zip", - "url": "https://api.github.com/repos/craftcms/plugin-installer/zipball/23ec472acd4410b70b07d5a02b2b82db9ee3f66b", - "reference": "23ec472acd4410b70b07d5a02b2b82db9ee3f66b", + "url": "https://api.github.com/repos/craftcms/plugin-installer/zipball/bd1650e8da6d5ca7a8527068d3e51c34bc7b6b4f", + "reference": "bd1650e8da6d5ca7a8527068d3e51c34bc7b6b4f", "shasum": "" }, "require": { @@ -1015,7 +1016,7 @@ "rss": "https://craftcms.com/changelog.rss", "source": "https://github.com/craftcms/cms" }, - "time": "2021-02-18T02:01:38+00:00" + "time": "2023-02-22T13:17:00+00:00" }, { "name": "craftcms/server-check", @@ -1429,24 +1430,25 @@ }, { "name": "elvanto/litemoji", - "version": "3.0.1", + "version": "4.3.0", "source": { "type": "git", "url": "https://github.com/elvanto/litemoji.git", - "reference": "acd6fd944814683983dcdfcf4d33f24430631b77" + "reference": "f13cf10686f7110a3b17d09de03050d0708840b8" }, "dist": { "type": "zip", - "url": "https://api.github.com/repos/elvanto/litemoji/zipball/acd6fd944814683983dcdfcf4d33f24430631b77", - "reference": "acd6fd944814683983dcdfcf4d33f24430631b77", + "url": "https://api.github.com/repos/elvanto/litemoji/zipball/f13cf10686f7110a3b17d09de03050d0708840b8", + "reference": "f13cf10686f7110a3b17d09de03050d0708840b8", "shasum": "" }, "require": { - "php": ">=7.0" + 
"ext-mbstring": "*", + "php": ">=7.3" }, "require-dev": { - "milesj/emojibase": "6.0.*", - "phpunit/phpunit": "^6.0" + "milesj/emojibase": "7.0.*", + "phpunit/phpunit": "^9.0" }, "type": "library", "autoload": { @@ -1465,9 +1467,9 @@ ], "support": { "issues": "https://github.com/elvanto/litemoji/issues", - "source": "https://github.com/elvanto/litemoji/tree/3.0.1" + "source": "https://github.com/elvanto/litemoji/tree/4.3.0" }, - "time": "2020-11-27T05:08:33+00:00" + "time": "2022-10-28T02:32:19+00:00" }, { "name": "enshrined/svg-sanitize", @@ -1789,16 +1791,16 @@ }, { "name": "guzzlehttp/psr7", - "version": "2.4.3", + "version": "2.4.4", "source": { "type": "git", "url": "https://github.com/guzzle/psr7.git", - "reference": "67c26b443f348a51926030c83481b85718457d3d" + "reference": "3cf1b6d4f0c820a2cf8bcaec39fc698f3443b5cf" }, "dist": { "type": "zip", - "url": "https://api.github.com/repos/guzzle/psr7/zipball/67c26b443f348a51926030c83481b85718457d3d", - "reference": "67c26b443f348a51926030c83481b85718457d3d", + "url": "https://api.github.com/repos/guzzle/psr7/zipball/3cf1b6d4f0c820a2cf8bcaec39fc698f3443b5cf", + "reference": "3cf1b6d4f0c820a2cf8bcaec39fc698f3443b5cf", "shasum": "" }, "require": { @@ -1888,7 +1890,7 @@ ], "support": { "issues": "https://github.com/guzzle/psr7/issues", - "source": "https://github.com/guzzle/psr7/tree/2.4.3" + "source": "https://github.com/guzzle/psr7/tree/2.4.4" }, "funding": [ { @@ -1904,7 +1906,7 @@ "type": "tidelift" } ], - "time": "2022-10-26T14:07:24+00:00" + "time": "2023-03-09T13:19:02+00:00" }, { "name": "illuminate/collections", @@ -6208,16 +6210,16 @@ }, { "name": "voku/portable-utf8", - "version": "6.0.12", + "version": "6.0.13", "source": { "type": "git", "url": "https://github.com/voku/portable-utf8.git", - "reference": "db0583727bb17666bbd2ba238c85babb973fd165" + "reference": "b8ce36bf26593e5c2e81b1850ef0ffb299d2043f" }, "dist": { "type": "zip", - "url": 
"https://api.github.com/repos/voku/portable-utf8/zipball/db0583727bb17666bbd2ba238c85babb973fd165", - "reference": "db0583727bb17666bbd2ba238c85babb973fd165", + "url": "https://api.github.com/repos/voku/portable-utf8/zipball/b8ce36bf26593e5c2e81b1850ef0ffb299d2043f", + "reference": "b8ce36bf26593e5c2e81b1850ef0ffb299d2043f", "shasum": "" }, "require": { @@ -6283,7 +6285,7 @@ ], "support": { "issues": "https://github.com/voku/portable-utf8/issues", - "source": "https://github.com/voku/portable-utf8/tree/6.0.12" + "source": "https://github.com/voku/portable-utf8/tree/6.0.13" }, "funding": [ { @@ -6307,7 +6309,7 @@ "type": "tidelift" } ], - "time": "2023-01-11T12:26:16+00:00" + "time": "2023-03-08T08:35:38+00:00" }, { "name": "voku/stop-words", @@ -7619,16 +7621,16 @@ }, { "name": "myclabs/deep-copy", - "version": "1.11.0", + "version": "1.11.1", "source": { "type": "git", "url": "https://github.com/myclabs/DeepCopy.git", - "reference": "14daed4296fae74d9e3201d2c4925d1acb7aa614" + "reference": "7284c22080590fb39f2ffa3e9057f10a4ddd0e0c" }, "dist": { "type": "zip", - "url": "https://api.github.com/repos/myclabs/DeepCopy/zipball/14daed4296fae74d9e3201d2c4925d1acb7aa614", - "reference": "14daed4296fae74d9e3201d2c4925d1acb7aa614", + "url": "https://api.github.com/repos/myclabs/DeepCopy/zipball/7284c22080590fb39f2ffa3e9057f10a4ddd0e0c", + "reference": "7284c22080590fb39f2ffa3e9057f10a4ddd0e0c", "shasum": "" }, "require": { @@ -7666,7 +7668,7 @@ ], "support": { "issues": "https://github.com/myclabs/DeepCopy/issues", - "source": "https://github.com/myclabs/DeepCopy/tree/1.11.0" + "source": "https://github.com/myclabs/DeepCopy/tree/1.11.1" }, "funding": [ { @@ -7674,7 +7676,7 @@ "type": "tidelift" } ], - "time": "2022-03-03T13:19:32+00:00" + "time": "2023-03-08T13:26:56+00:00" }, { "name": "nikic/php-parser", @@ -7845,16 +7847,16 @@ }, { "name": "phpstan/phpstan", - "version": "1.10.4", + "version": "1.10.7", "source": { "type": "git", "url": 
"https://github.com/phpstan/phpstan.git", - "reference": "8d39218664b45a4a42d5be66d2b63dcf8c149982" + "reference": "b10ceb526d9607903c5b2673f1fc8775dbe48975" }, "dist": { "type": "zip", - "url": "https://api.github.com/repos/phpstan/phpstan/zipball/8d39218664b45a4a42d5be66d2b63dcf8c149982", - "reference": "8d39218664b45a4a42d5be66d2b63dcf8c149982", + "url": "https://api.github.com/repos/phpstan/phpstan/zipball/b10ceb526d9607903c5b2673f1fc8775dbe48975", + "reference": "b10ceb526d9607903c5b2673f1fc8775dbe48975", "shasum": "" }, "require": { @@ -7883,8 +7885,11 @@ "static analysis" ], "support": { + "docs": "https://phpstan.org/user-guide/getting-started", + "forum": "https://github.com/phpstan/phpstan/discussions", "issues": "https://github.com/phpstan/phpstan/issues", - "source": "https://github.com/phpstan/phpstan/tree/1.10.4" + "security": "https://github.com/phpstan/phpstan/security/policy", + "source": "https://github.com/phpstan/phpstan-src" }, "funding": [ { @@ -7900,7 +7905,7 @@ "type": "tidelift" } ], - "time": "2023-03-06T13:39:20+00:00" + "time": "2023-03-16T15:24:20+00:00" }, { "name": "phpunit/php-code-coverage", @@ -8222,16 +8227,16 @@ }, { "name": "phpunit/phpunit", - "version": "9.6.4", + "version": "9.6.5", "source": { "type": "git", "url": "https://github.com/sebastianbergmann/phpunit.git", - "reference": "9125ee085b6d95e78277dc07aa1f46f9e0607b8d" + "reference": "86e761949019ae83f49240b2f2123fb5ab3b2fc5" }, "dist": { "type": "zip", - "url": "https://api.github.com/repos/sebastianbergmann/phpunit/zipball/9125ee085b6d95e78277dc07aa1f46f9e0607b8d", - "reference": "9125ee085b6d95e78277dc07aa1f46f9e0607b8d", + "url": "https://api.github.com/repos/sebastianbergmann/phpunit/zipball/86e761949019ae83f49240b2f2123fb5ab3b2fc5", + "reference": "86e761949019ae83f49240b2f2123fb5ab3b2fc5", "shasum": "" }, "require": { @@ -8264,8 +8269,8 @@ "sebastian/version": "^3.0.2" }, "suggest": { - "ext-soap": "*", - "ext-xdebug": "*" + "ext-soap": "To be able to generate 
mocks based on WSDL files", + "ext-xdebug": "PHP extension that provides line coverage as well as branch and path coverage" }, "bin": [ "phpunit" @@ -8304,7 +8309,7 @@ ], "support": { "issues": "https://github.com/sebastianbergmann/phpunit/issues", - "source": "https://github.com/sebastianbergmann/phpunit/tree/9.6.4" + "source": "https://github.com/sebastianbergmann/phpunit/tree/9.6.5" }, "funding": [ { @@ -8320,7 +8325,7 @@ "type": "tidelift" } ], - "time": "2023-02-27T13:06:37+00:00" + "time": "2023-03-09T06:34:10+00:00" }, { "name": "sebastian/cli-parser", diff --git a/docs/developers/events.md b/docs/developers/events.md index 6e95b061..03868c2d 100644 --- a/docs/developers/events.md +++ b/docs/developers/events.md @@ -153,3 +153,33 @@ Event::on(Process::class, Process::EVENT_STEP_AFTER_ELEMENT_SAVE, function(FeedP }); ``` + +## Field parsing related events + +### The `beforeParseField` event + +Triggered before a field value is parsed. Plugins can get notified before a field value is parsed. + +```php +use craft\feedme\events\FieldEvent; +use craft\feedme\services\Fields; +use yii\base\Event; + +Event::on(Fields::class, Fields::EVENT_BEFORE_PARSE_FIELD, function(FieldEvent $event) { + +}); +``` + +### The `afterParseField` event + +Triggered after a field value is parsed. Plugins can get notified after a field value is parsed and alter the parsed value. 
+ +```php +use craft\feedme\events\FieldEvent; +use craft\feedme\services\Fields; +use yii\base\Event; + +Event::on(Fields::class, Fields::EVENT_AFTER_PARSE_FIELD, function(FieldEvent $event) { + +}); +``` \ No newline at end of file diff --git a/phpstan.neon b/phpstan.neon index 40b61126..1c6b9228 100644 --- a/phpstan.neon +++ b/phpstan.neon @@ -16,4 +16,4 @@ parameters: - src/fields/DigitalProducts.php - src/fields/SimpleMap.php - src/fields/SuperTable.php - - src/fiesds/CommerceProducts.php + - src/fields/CommerceProducts.php diff --git a/src/Plugin.php b/src/Plugin.php index d128c591..b4e9befe 100644 --- a/src/Plugin.php +++ b/src/Plugin.php @@ -62,7 +62,7 @@ public static function config(): array } public string $minVersionRequired = '4.4.0'; - public string $schemaVersion = '4.4.1'; + public $schemaVersion = '5.1.0.0'; public bool $hasCpSettings = true; public bool $hasCpSection = true; diff --git a/src/elements/Category.php b/src/elements/Category.php index e587ec81..0d85ee9f 100644 --- a/src/elements/Category.php +++ b/src/elements/Category.php @@ -181,6 +181,14 @@ protected function parseParent($feedData, $fieldInfo): ?int return $element->id; } + + // use the default value if it's provided and none of the above worked + // https://github.com/craftcms/feed-me/issues/1154 + if (!empty($default)) { + $this->element->newParentId = $default[0]; + + return $default[0]; + } // use the default value if it's provided and none of the above worked // https://github.com/craftcms/feed-me/issues/1154 diff --git a/src/elements/Entry.php b/src/elements/Entry.php index f4dbed3e..d8af9186 100644 --- a/src/elements/Entry.php +++ b/src/elements/Entry.php @@ -225,7 +225,7 @@ protected function parseParent($feedData, $fieldInfo): ?int Plugin::error('Entry error: Could not create parent - `{e}`.', ['e' => Json::encode($element->getErrors())]); } else { Plugin::info('Entry `#{id}` added.', ['id' => $element->id]); - $this->element->parentId = $element->id; + 
$this->element->newParentId = $element->id; } return $element->id; diff --git a/src/fields/TypedLink.php b/src/fields/TypedLink.php index a5db4f7e..1ec9e19b 100644 --- a/src/fields/TypedLink.php +++ b/src/fields/TypedLink.php @@ -54,7 +54,7 @@ public function parseField(): mixed } foreach ($fields as $subFieldHandle => $subFieldInfo) { - $preppedData[$subFieldHandle] = DataHelper::fetchValue($this->feedData, $subFieldInfo, $this->feedData); + $preppedData[$subFieldHandle] = DataHelper::fetchValue($this->feedData, $subFieldInfo, $this->feed); } // Protect against sending an empty array diff --git a/src/fields/Users.php b/src/fields/Users.php index d28ed552..367a52a4 100644 --- a/src/fields/Users.php +++ b/src/fields/Users.php @@ -107,6 +107,13 @@ public function parseField(): mixed break; } + // special provision for falling back on default BaseRelationField value + // https://github.com/craftcms/feed-me/issues/1195 + if (empty($dataValue)) { + $foundElements = $default; + break; + } + // Because we can match on element attributes and custom fields, AND we're directly using SQL // queries in our `where` below, we need to check if we need a prefix for custom fields accessing // the content table. diff --git a/src/helpers/DataHelper.php b/src/helpers/DataHelper.php index de3f15f9..ec688c0b 100644 --- a/src/helpers/DataHelper.php +++ b/src/helpers/DataHelper.php @@ -178,6 +178,11 @@ public static function fetchValue($feedData, $fieldInfo, $feed): mixed return $value; } + // If setEmptyValues is enabled allow overwriting existing data + if ($value === "" && $feed['setEmptyValues']) { + return $value; + } + // We want to preserve 0 and '0', but if it's empty, return null. 
// https://github.com/craftcms/feed-me/issues/779 if (!is_numeric($value) && empty($value)) { diff --git a/src/services/Process.php b/src/services/Process.php index 51558a79..cd7bd2df 100644 --- a/src/services/Process.php +++ b/src/services/Process.php @@ -534,22 +534,24 @@ public function afterProcessFeed($settings, $feed, $processedElementIds): void return; } - $elementsToDeleteDisable = array_diff($settings['existingElements'], $processedElementIds); + if ($processedElementIds) { + $elementsToDeleteDisable = array_diff($settings['existingElements'], $processedElementIds); - if ($elementsToDeleteDisable) { - if (DuplicateHelper::isDisable($feed)) { - $this->_service->disable($elementsToDeleteDisable); + if ($elementsToDeleteDisable) { + if (DuplicateHelper::isDisable($feed)) { + $this->_service->disable($elementsToDeleteDisable); $message = 'The following elements have been disabled: ' . Json::encode($elementsToDeleteDisable) . '.'; - } elseif (DuplicateHelper::isDisableForSite($feed)) { - $this->_service->disableForSite($elementsToDeleteDisable); + } elseif (DuplicateHelper::isDisableForSite($feed)) { + $this->_service->disableForSite($elementsToDeleteDisable); $message = 'The following elements have been disabled for the target site: ' . Json::encode($elementsToDeleteDisable) . '.'; - } else { - $this->_service->delete($elementsToDeleteDisable); + } else { + $this->_service->delete($elementsToDeleteDisable); $message = 'The following elements have been deleted: ' . Json::encode($elementsToDeleteDisable) . '.'; - } + } - Plugin::info($message); - Plugin::debug($message); + Plugin::info($message); + Plugin::debug($message); + } } // Log the total time taken to process the feed @@ -558,7 +560,7 @@ public function afterProcessFeed($settings, $feed, $processedElementIds): void Plugin::$stepKey = null; - $message = 'Processing ' . count($processedElementIds) . ' elements finished in ' . $execution_time . 's'; + $message = 'Processing ' . ($processedElementIds ? 
count($processedElementIds) : 0) . ' elements finished in ' . $execution_time . 's'; Plugin::info($message); Plugin::debug($message);