#!/bin/bash

printf "\n"

show_help() {
    echo "Usage: $0 [--commit-hashes|-c commit hashes] [--deployments-path|-d deployments path]"
    echo "Options:"
    echo " --help | -h (Optional) Display help information on how to use this script"
    echo " --commit-hashes | -c (Required) Specify the commit hashes, separated by comma (,)"
    echo " --commits-between | -cb (Required) Specify two commit hashes, separated by comma (,), so all commits between them will be considered"
    echo " --repo-dir | -r (Required) Specify the git repository path that contains the commits"
    echo " --deployments-path | -d (Required) Specify the Wildfly ports-XX/deployments path"
    echo " --file-paths | -f (Optional) Specify file paths to consider in the update, separated by comma (,)"
    echo "    Specifying this will make the \"--commit-hashes\" parameter optional"
    echo " --no-build | --nb (Optional) If selected, the script will NOT build the Maven components in order to generate the target/ folder"
    echo " --use-git-status | -g (Optional) Will add the files with differences from the \"git status\" command"
    echo " --no-substitution | --ns (Optional) If selected, the script will NOT substitute (explode) the artifacts in the deployments folder"
}
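
# Example invocation (illustrative only; the hashes and paths below are hypothetical):
#   $0 --repo-dir ~/repos/my-project \
#      --commit-hashes abc1234,def5678 \
#      --deployments-path /opt/wildfly/ports-01/deployments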

commit_hashes_arg=''
commits_between_arg=''
repo_dir=''
deployments_path=''
file_paths_arg=''
build_maven=true
use_git_status=false
should_substitute=true

while [ "$#" -gt 0 ]; do
    case "$1" in
        --help|-h) show_help; exit ;;
        --repo-dir|-r) repo_dir="$2"; shift 2;;
        --commit-hashes|-c) commit_hashes_arg="$2"; shift 2;;
        --commits-between|-cb) commits_between_arg="$2"; shift 2;;
        --deployments-path|-d) deployments_path="$2"; shift 2;;
        --file-paths|-f) file_paths_arg="$2"; shift 2;;
        --no-build|--nb) build_maven=false; shift ;;
        --use-git-status|-g) use_git_status=true; shift ;;
        --no-substitution|--ns) should_substitute=false; shift ;;
        *) shift ;;
    esac
done

print_newline() {
    echo -e ""
}

convert_csv_to_array() {
    csv_arg=$1
    res_arr=`echo $csv_arg | tr ',' "\n"`
    echo $res_arr
}
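
# Illustrative usage (hypothetical values):
#   convert_csv_to_array "abc123,def456"   # prints: abc123 def456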

commit_exists() {
    commit_hash=$1

    git cat-file -t $commit_hash 1> /dev/null 2> /dev/null

    if [ $? -eq 0 ]; then
        return 0
    else
        return 1
    fi
}

get_target_paths_from_commit_hash() {
    commit_hash=$1

    # TODO: handle the diff filter with A (added) separately
    # TODO: handle the diff filter with D (deleted) separately
    echo `git show --oneline --name-only --diff-filter=MA $commit_hash | tail -n +2`
}
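
# Illustrative usage (hypothetical hash):
#   get_target_paths_from_commit_hash abc1234
# prints the paths of files added/modified by that commit, space-separated on one line.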

find_pom_xml_file_recursively() {
    path=$1

    pom_path=`dirname $path`/pom.xml

    if [ $pom_path == './pom.xml' ]; then
        # echo "Could not find a pom.xml file for a module"
        exit 1
    fi

    if [ ! -f $pom_path ]; then
        new_dir=`dirname $path`
        pom_path=`find_pom_xml_file_recursively $new_dir`
    fi

    echo $pom_path
}

build_mvn() {
    build_cur=`pwd`

    printf "\n"
    echo "[INFO] Running Maven build:"
    echo " ${build_cur#"$repo_dir"}"

    mvn --quiet clean install --file ./pom.xml --settings ~/.m2/settings.xml

    if [ $? -ne 0 ]; then
        echo "[INFO] The Maven build finished with errors. The script will stop now."
        exit 1
    fi

    echo "[INFO] Maven build finished."
}

function join_by {
    local d=${1-} f=${2-}

    if shift 2; then
        printf %s "$f" "${@/#/$d}"
    fi
}
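
# Illustrative usage:
#   join_by , a b c   # prints: a,b,c (no trailing newline)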

remove_suffix() {
    local input="$1"
    local delimiter="-"
    local count=$(grep -o "${delimiter}" <<< "$input" | wc -l)
    local index=$(( count - 1 ))
    local result=$(cut -d"${delimiter}" -f1-"$index" <<< "$input")
    echo "$result"
}

get_substring_before_string() {
    str=$1
    subs=$2

    echo ${str%$subs*}
}

get_substring_after_string() {
    str=$1
    subs=$2

    echo ${str#*$subs}
}
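
# Illustrative usage (hypothetical path):
#   get_substring_before_string "module/src/main/java/com/acme" "src/main/java/"   # prints: module/
#   get_substring_after_string  "module/src/main/java/com/acme" "src/main/java/"   # prints: com/acme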

sort_pom_paths_for_compilation_order() {
    local unsorted_pom_paths=("$@")
    # An indexed array keeps the intended compilation order
    # (bash associative arrays do not preserve insertion order).
    local compilation_order_list=(
        'main'
        'commons'
        'security'
        'tools'
        'geo'
        'meta'
        'framework'
        'faces'
        'application'
        'plugins'
        'cdk'
        'auditing'
    )
    local sorted_pom_paths=()

    for dir in "${compilation_order_list[@]}"; do
        for path in "${unsorted_pom_paths[@]}"; do
            if [[ $path == "${dir}/"* ]]; then
                sorted_pom_paths+=("$path")
            fi
        done
    done

    printf "%s\n" "${sorted_pom_paths[@]}"
}
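
# Illustrative usage (hypothetical paths):
#   sort_pom_paths_for_compilation_order "framework/core/pom.xml" "commons/util/pom.xml"
# prints the commons/ pom before the framework/ pom, following compilation_order_list.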

function confirm() {
    while true; do
        read -p '' yn
        case $yn in
            [Yy]* ) return 0;;
            [Nn]* ) return 1;;
            [Cc]* ) exit;;
            * ) echo "Please answer YES, NO, or CANCEL.";;
        esac
    done
}

if [ -z "$file_paths_arg" ] \
    && [ -z "$commit_hashes_arg" ] \
    && [ -z "$commits_between_arg" ] \
    && ! $use_git_status; then
    echo "Error: At least one of these arguments must be provided:"
    echo " Commit hashes (--commit-hashes or -c) or;"
    echo " Commits between (--commits-between or -cb) or;"
    echo " File paths (--file-paths or -f) or;"
    echo " Use Git Status (--use-git-status or -g)."
    exit 1
fi

if [ -z "$repo_dir" ]; then
    if [ ! -z "$commit_hashes_arg" ] || [ ! -z "$commits_between_arg" ]; then
        echo "Error: If you specify the Commit hashes (--commit-hashes or -c) and/or Commits between (--commits-between or -cb), the repository directory (--repo-dir or -r) is also required."
        exit 1
    fi
fi

if [ -z "$deployments_path" ]; then
    echo "Error: Deployments path (--deployments-path or -d) is required."
    exit 1
fi

scripts_folder=$HOME/repos/personal/personal-devboot/scripts/shell/job/gp

if [ ! -d "$scripts_folder" ]; then
    echo "The git repository \"CloudAlb/personal-devboot\" must be in the \"~/repos\" folder!"
    exit 1
fi

cur=`pwd`
wildfly_files_suffix_regex='-[0-9].*\.[jew]ar'

target_paths=()

cd $repo_dir

if [ ! -z "$commit_hashes_arg" ]; then
    commit_hashes=`convert_csv_to_array "$commit_hashes_arg"`

    for c in ${commit_hashes[@]}; do
        if ! commit_exists $c; then
            continue
        fi

        target_paths+=(`get_target_paths_from_commit_hash "$c"`)
    done
fi

if [ ! -z "$commits_between_arg" ]; then
    commit_hashes=`git rev-list --ancestry-path "${commits_between_arg/","/".."}"`
    target_paths_arg=()

    for c in ${commit_hashes[@]}; do
        if ! commit_exists $c; then
            continue
        fi

        target_paths_arg+=(`get_target_paths_from_commit_hash "$c"`)
    done

    for i in ${target_paths_arg[@]}; do
        target_paths+=($i)
    done
fi
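
# Note: "${commits_between_arg/,/..}" turns an argument such as "abc1234,def5678"
# (hypothetical hashes) into the range "abc1234..def5678" consumed by git rev-list.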
if [ ! -z "$file_paths_arg" ]; then
|
|
file_paths=`convert_csv_to_array "$file_paths_arg"`
|
|
|
|
for f in ${file_paths[@]}; do
|
|
if [ ! -f "$repo_dir/$f" ]; then
|
|
echo "[WARN] O arquivo informado \"$f\" não existe no repositório."
|
|
continue
|
|
fi
|
|
|
|
target_paths+=($f)
|
|
done
|
|
fi
|
|
|
|
if $use_git_status; then
|
|
target_paths+=(`git ls-files --modified`)
|
|
|
|
if [ ${#target_paths[@]} -eq 0 ]; then
|
|
echo "Error: You specified \"--use-git-status\" but there wasn't any modified files to use."
|
|
exit 1
|
|
fi
|
|
fi

target_paths_uniq=$(for i in "${target_paths[@]}"; do echo $i; done | sort -u)

dot_java_files=()
pom_paths=()

# TODO, add support for html, js and Messages_*.properties files
# html_files=()
# js_files=()
# msg_files=()

for i in ${target_paths_uniq[@]}; do
    if [[ $i == *.java ]]; then
        dot_java_files+=("$i")
    fi

    res=`find_pom_xml_file_recursively $i`
    pom_paths+=("$res")
done

pom_paths_uniq=($(for i in "${pom_paths[@]}"; do echo $i; done | sort -u))

sorted_pom_paths_uniq=($(sort_pom_paths_for_compilation_order "${pom_paths_uniq[@]}"))

echo "[INFO] Pom order"

for p in ${sorted_pom_paths_uniq[@]}; do
    echo " $p"
done

for p in ${sorted_pom_paths_uniq[@]}; do
    pom_dir=`dirname $p`
    cd $repo_dir/$pom_dir

    [ $build_maven == true ] && build_mvn
done

# alternative for artifactId search
# non_root_deployments_files_to_explode=()
# non_root_deployments_files_to_explode_full_path=()

artifact_ids_to_explode=()
for p in ${sorted_pom_paths_uniq[@]}; do
    pom_dir=$repo_dir/`dirname $p`
    artifact_ids_to_explode+=(`xmlstarlet sel -N pom="http://maven.apache.org/POM/4.0.0" -t -v '/pom:project/pom:artifactId' $pom_dir/pom.xml`)

    if [ ! -d "$pom_dir/target" ]; then
        echo "[INFO] Could not find the \"target\" folder in \"$pom_dir\""
        echo "[INFO] Initializing maven compilation..."

        cd $pom_dir
        build_mvn
    fi

    # cd $pom_dir/target
    # non_root_deployments_files_to_explode+=(`find * -maxdepth 0 -type f -regextype sed -regex ".*[jew]ar" ! -name '*-client.jar' ! -name '*-sources.jar'`)
done
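
# The xmlstarlet query above reads /project/artifactId from each pom.xml; e.g. a pom.xml
# declaring <artifactId>cpqd-application-web</artifactId> (hypothetical) contributes
# "cpqd-application-web" to artifact_ids_to_explode.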

echo "artifact_ids_to_explode"
for i in "${artifact_ids_to_explode[@]}"; do
    echo " $i"
done

# adding some artifact IDs because "cpqd-application" might have a sibling called "cpqd-etics"
extra_artifact_ids_to_explode=()
for a in ${artifact_ids_to_explode[@]}; do
    extra_artifact_id="${a/-application-/-etics-}"

    # this way I don't have to create an "extra_artifact_ids_to_explode_uniq" variable
    if [ "$a" != "$extra_artifact_id" ]; then
        extra_artifact_ids_to_explode+=($extra_artifact_id)
    fi
done

printf "\n"

echo "extra_artifact_ids_to_explode"
for i in "${extra_artifact_ids_to_explode[@]}"; do
    echo " $i"
done

if $should_substitute; then
    cd $deployments_path
    root_deployments_files_to_explode=`find * -maxdepth 0 -type f -regex ".*$wildfly_files_suffix_regex"`

    if [ ! -z "$root_deployments_files_to_explode" ]; then
        $scripts_folder/explode-java-files.sh --files `join_by , ${root_deployments_files_to_explode}` # webdeskmapreport, etc.
    fi

    printf "\n"

    echo "extra artifactId_paths_to_explode"
    for a in ${extra_artifact_ids_to_explode[@]}; do
        echo "find * -type f -regex \".*$a$wildfly_files_suffix_regex\""
        artifactId_paths_to_explode=(`find * -type f -regex ".*$a$wildfly_files_suffix_regex"`)
        echo " ${artifactId_paths_to_explode[@]}"
        # if [ ! -z "$artifactId_paths_to_explode" ]; then
        #     $scripts_folder/explode-java-files.sh --files `join_by , ${artifactId_paths_to_explode}`
        # fi
    done

    printf "\n"

    echo "artifactId_paths_to_explode"
    # exploding files by found artifactIds
    for a in ${artifact_ids_to_explode[@]}; do
        artifactId_paths_to_explode=(`find * -type f -regex ".*$a$wildfly_files_suffix_regex"`)
        echo " ${artifactId_paths_to_explode[@]}"
        # if [ ! -z "$artifactId_paths_to_explode" ]; then
        #     $scripts_folder/explode-java-files.sh --files `join_by , ${artifactId_paths_to_explode}`
        # fi
    done

    src_path_const='src/main/java/'

    # TODO, add known exceptions to automatically search for alternative target paths and deployment paths
    # known_exceptions_alternative_target_paths=(CacheGeneratorCleanupStartupBean.java)
    # known_exceptions_alternative_deployments_paths=(CacheGeneratorCleanupStartupBean.java)

    unsuccessful_copies=()
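
    # For each changed .java file the script derives the expected .class locations.
    # Illustrative example (hypothetical paths):
    #   f = commons/util/src/main/java/com/acme/Foo.java
    #     path_in_deployments -> com/acme
    #     source of the copy  -> <repo>/commons/util/target/classes/com/acme/Foo.class
    #     destination(s)      -> matching **/com/acme/Foo.class found under $deployments_path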

    for f in ${dot_java_files[@]}; do
        f_dirname=`dirname $f`
        f_basename=`basename $f`

        class_filename=`echo ${f_basename%".java"}.class`

        path_in_deployments=`get_substring_after_string "$f_dirname" "$src_path_const"`
        class_file_paths_in_deployments=($(find $deployments_path -type f -wholename "**/$path_in_deployments/$class_filename"))

        middle_path_subst=`get_substring_before_string "$f_dirname" "$src_path_const"`
        class_file_path_in_target=`find $repo_dir -type f -wholename "**/${middle_path_subst}target/classes/$path_in_deployments/$class_filename"`

        is_done=false

        if [ ! -z "$class_file_paths_in_deployments" ]; then
            is_done=true
        else
            # echo "command: find $deployments_path -type f -wholename \"**/$path_in_deployments/$class_filename\""

            # broader search
            bigger_search=`find $deployments_path -type f -wholename "**/$class_filename"`
            if [ ! -z "$bigger_search" ]; then
                printf "\n"
                echo "[INFO] The file \"$f_basename\" was not found in the expected deployments directory, but there are alternative path(s):"
                for alt_path in ${bigger_search[@]}; do
                    echo " $alt_path"
                done
                printf "\n"
                echo "[ASK] Do you want to copy to these path(s)?"
                if confirm; then
                    class_file_paths_in_deployments=$bigger_search
                    is_done=true
                fi
            fi
        fi

        if ! $is_done; then
            printf "\n"
            echo "[WARN] The file \"$f_basename\" was not found in any deployments directory. Maybe it needs to be copied manually?"
            echo "[ASK] If it is a new file, do you know of a neighbouring class of it?"

            if confirm; then
                # steps to decide whether the script will create a placeholder file so the search works later
                printf "\n"
                echo "Enter the file name (e.g. \"FilterResult.java\" - without quotes):"
                read neighbour_filename
                neighbour_class_filename=`echo ${neighbour_filename%".java"}.class`
                neighbour_class_filename_in_deployments=`find $deployments_path -type f -wholename "**/$path_in_deployments/$neighbour_class_filename"` # may return more than one result

                if [ ! -z "$neighbour_class_filename_in_deployments" ]; then
                    printf "\n"
                    echo "The given neighbouring file exists in deployments. Path(s):"

                    for d in ${neighbour_class_filename_in_deployments[@]}; do
                        echo " $d"
                    done

                    printf "\n"
                    echo "[ASK] Do you want to copy the original file that was not found into the same directory(ies)?"
                    if confirm; then
                        for d in ${neighbour_class_filename_in_deployments[@]}; do
                            touch "`dirname $d`/$class_filename"
                        done
                    else
                        unsuccessful_copies+=($f_basename)
                        printf "\n"
                        echo "[INFO] Skipping copy into the neighbouring file's directory..."
                    fi
                fi
            else
                unsuccessful_copies+=($f_basename)
                echo "[INFO] Skipping copy..."
            fi
        fi

        for p in ${class_file_paths_in_deployments[@]}; do
            if [ -z "$class_file_path_in_target" ]; then
                printf "\n"
                echo "[WARN] The file \"$f_basename\" was not found in a fixed \"target\" folder (\"$middle_path_subst\") of the repository"

                echo "[ASK] Do you want to try a broader search?"
                if confirm; then
                    new_class_file_path_in_target=`find $repo_dir -type f -wholename "**/target/classes/**/$class_filename"`

                    if [ ! -z "$new_class_file_path_in_target" ]; then
                        printf "\n"
                        echo "A valid, broader path was found for the file:"
                        echo " $new_class_file_path_in_target"

                        printf "\n"
                        echo "[ASK] Do you want to use it for the copy?"

                        if confirm; then
                            # assign this variable directly because the next iterations, which are for the same file, will use the new path
                            class_file_path_in_target=$new_class_file_path_in_target
                        else
                            unsuccessful_copies+=($f_basename)
                            continue
                        fi
                    else
                        unsuccessful_copies+=($f_basename)
                        printf "\n"
                        echo "[INFO] Could not find a valid, broader path for the file. Skipping copy..."
                        continue
                    fi
                else
                    unsuccessful_copies+=($f_basename)
                    continue
                fi
            fi

            cp_dest=`dirname $p`

            printf "\n"
            echo "[INFO] Copying file:"
            echo " From: $class_file_path_in_target"
            echo " To:   $cp_dest"

            cp --force $class_file_path_in_target $cp_dest

            if [ $? -eq 0 ]; then
                echo "[INFO] Done."
            else
                printf "\n"
                echo "[INFO] The copy of the artifacts finished with errors."
                exit 1
            fi
        done
    done

    if [ "${#unsuccessful_copies[@]}" -ne 0 ]; then
        unsuccessful_copies_uniq=$(for i in "${unsuccessful_copies[@]}"; do echo $i; done | sort -u)

        printf "\n"
        echo "[WARN] Some files could not be updated. Check the script log."

        echo "Unsuccessful files"
        for i in ${unsuccessful_copies_uniq[@]}; do
            echo " $i"
        done
    fi
fi