Multiple refactors and development in the scripts/shell/job/gp folder
253  scripts/shell/job/gp/team-pipeline-local.sh  Executable file
@@ -0,0 +1,253 @@
#!/bin/bash

show_help() {
    echo "Usage: $0 [--commit-hashes|-c <commit hashes>] [--repo-dir|-r <repo dir>] [--deployments-path|-d <deployments path>]"
    echo "Options:"
    echo "  --help | -h              (Optional) Display help information on how to use this script"
    echo "  --commit-hashes | -c     (Required) Specify the commit hashes, separated by commas (,)"
    echo "  --repo-dir | -r          (Required) Specify the git repository path that contains the commits"
    echo "  --deployments-path | -d  (Required) Specify the Wildfly ports-XX/deployments path"
    echo "  --file-paths | -f        (Optional) Specify file paths to consider in the update, separated by commas (,)"
    echo "                           Specifying this makes the \"--commit-hashes\" parameter optional"
    echo "  --build | -b             (Optional) If set, build the Maven modules so that the target/ folders are generated"
    echo "  --use-git-status | -g    (Optional) Also include the files reported as modified by \"git status\""
}
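
# Default option values; they are overridden by the argument-parsing loop below.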
commit_hashes_arg=''
repo_dir=''
deployments_path=''
file_paths_arg=''
build_maven=false
use_git_status=false

while [ "$#" -gt 0 ]; do
    case "$1" in
        --help|-h) show_help; exit ;;
        --repo-dir|-r) repo_dir="$2"; shift 2 ;;
        --commit-hashes|-c) commit_hashes_arg="$2"; shift 2 ;;
        --deployments-path|-d) deployments_path="$2"; shift 2 ;;
        --file-paths|-f) file_paths_arg="$2"; shift 2 ;;
        --build|-b) build_maven=true; shift ;;             # boolean flag: consume only the option itself
        --use-git-status|-g) use_git_status=true; shift ;; # boolean flag: consume only the option itself
        *) shift ;;
    esac
done

# echo "commit_hashes_arg: $commit_hashes_arg"
# echo "repo_dir: $repo_dir"
# echo "deployments_path: $deployments_path"
# echo "file_paths_arg: $file_paths_arg"
# echo "build_maven: $build_maven"
# echo "use_git_status: $use_git_status"

print_newline() {
    echo -e ""
}
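
# convert_csv_to_array: turn a comma-separated string into whitespace-separated tokens
# (relies on shell word splitting; callers iterate over the echoed result).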
convert_csv_to_array() {
    csv_arg=$1
    res_arr=`echo $csv_arg | tr ',' "\n"`
    echo $res_arr
}
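
# commit_exists: succeed if the given hash resolves to an object in the current git repository.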
commit_exists() {
    commit_hash=$1

    if git cat-file -t "$commit_hash" > /dev/null 2>&1; then
        return 0
    else
        return 1
    fi
}
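
# find_pom_xml_file_recursively: walk up from the given path until a pom.xml is found,
# i.e. resolve the Maven module that owns a changed file.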
find_pom_xml_file_recursively() {
    path=$1

    pom_path=`dirname $path`/pom.xml

    if [ "$pom_path" == './pom.xml' ]; then
        # echo "Could not find a pom.xml file for a module"
        exit 1
    fi

    if [ ! -f "$pom_path" ]; then
        new_dir=`dirname $path`
        pom_path=`find_pom_xml_file_recursively $new_dir`
    fi

    echo $pom_path
}
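
# arr_uniq: print the unique elements of the given array (pass it expanded, e.g. arr_uniq "${arr[@]}").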
arr_uniq() {
    arr=("$@")

    echo $(for i in "${arr[@]}"; do echo $i; done | sort -u)
}

build_mvn() {
    mvn clean install --file ./pom.xml --settings ~/.m2/settings.xml
}
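
# join_by: join the remaining arguments with the given delimiter, e.g. `join_by , a b c` prints "a,b,c".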
function join_by {
    local d=${1-} f=${2-}

    if shift 2; then
        printf %s "$f" "${@/#/$d}"
    fi
}
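
# remove_suffix: drop the trailing '-'-separated fields from an artifact file name,
# leaving the prefix used below to look up the corresponding deployed jar.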
remove_suffix() {
    local input="$1"
    local delimiter="-"
    local count=$(grep -o "${delimiter}" <<< "$input" | wc -l)
    local index=$(( count - 1 ))
    local result=$(cut -d"${delimiter}" -f1-"$index" <<< "$input")
    echo "$result"
}
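
# get_substring_after_string: print the part of $1 that follows the first occurrence of $2.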
get_substring_after_string() {
    str=$1
    subs=$2

    echo ${str#*$subs}
}
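
# Validate the argument combinations before doing any work.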
if [ -z "$file_paths_arg" ] \
    && [ -z "$commit_hashes_arg" ] \
    && ! $use_git_status; then
    echo "Error: At least one of the following arguments must be provided:"
    echo "  Commit hashes (--commit-hashes or -c);"
    echo "  File paths (--file-paths or -f);"
    echo "  Use git status (--use-git-status or -g)."
    exit 1
fi

if [ ! -z "$commit_hashes_arg" ] && [ -z "$repo_dir" ]; then
    echo "Error: If you specify the commit hashes (--commit-hashes or -c), the repository directory (--repo-dir or -r) is also required."
    exit 1
fi

if [ -z "$deployments_path" ]; then
    echo "Error: Deployments path (--deployments-path or -d) is required."
    exit 1
fi
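
# The companion script explode-java-files.sh is expected to live in this folder.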
scripts_folder=$HOME/repos/personal/personal-devboot/scripts/shell/job/gp

if [ ! -d "$scripts_folder" ]; then
    echo "The git repository \"CloudAlb/personal-devboot\" must be in the \"~/repos\" folder!"
    exit 1
fi
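
# Collect the paths touched by the given commits (and, optionally, by "git status") into target_paths.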
cur=`pwd`

target_paths=()

cd "$repo_dir"
if [ ! -z "$commit_hashes_arg" ]; then
    commit_hashes=`convert_csv_to_array "$commit_hashes_arg"`

    for c in ${commit_hashes[@]}; do
        if ! commit_exists $c; then
            continue
        fi

        target_paths+=(`git show --oneline --name-only $c | tail -n +2`)
    done
fi

if $use_git_status; then
    target_paths+=(`git ls-files --modified`)
fi

cd "$cur"
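
# Explicitly passed file paths (relative to the invocation directory) are also added to target_paths.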
dot_java_files=()

if [ ! -z "$file_paths_arg" ]; then
    file_paths=`convert_csv_to_array "$file_paths_arg"`

    for f in ${file_paths[@]}; do
        # skip paths that do not exist as regular files
        if [ ! -f $f ]; then
            continue
        fi

        target_paths+=("$f")
    done
fi
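
# Back inside the repository: collect the .java basenames and the unique set of directories that were touched.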
cd "$repo_dir"

for f in ${target_paths[@]}; do
    if [[ $f == *.java ]]; then
        dot_java_files+=(`basename $f`)
    fi
done

target_paths_uniq=($(printf "%s\n" `dirname ${target_paths[@]}` | sort -u))
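
# Resolve the pom.xml (i.e. the Maven module) that owns each touched directory, then optionally build it.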
pom_paths=()

for i in ${target_paths_uniq[@]}; do
    res=`find_pom_xml_file_recursively $i`
    pom_paths+=("$res")
done

for p in ${pom_paths[@]}; do
    pom_dir=`dirname $p`
    cd "$repo_dir/$pom_dir"

    [ $build_maven == true ] && build_mvn
done
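
# Gather the deployable artifacts (jar/war/ear) produced under each module's target/ folder,
# building the module first if no target/ folder exists yet.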
non_root_deployments_files_to_explode=()
non_root_deployments_files_to_explode_full_path=()

for p in ${pom_paths[@]}; do
    pom_dir=$repo_dir/`dirname $p`

    if [ ! -d "$pom_dir/target" ]; then
        echo "INFO: could not find the \"target\" folder in \"$pom_dir\""
        echo "INFO: initializing maven compilation..."

        cd "$pom_dir"
        build_mvn
    fi

    cd "$pom_dir/target"
    non_root_deployments_files_to_explode+=(`find * -maxdepth 0 -type f -regextype sed -regex ".*[jew]ar" ! -name '*-client.jar' ! -name '*-sources.jar'`)
done
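
# First explode the artifacts that live directly in the Wildfly deployments folder,
# then resolve and explode the module artifacts found above.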
cd "$deployments_path"
root_deployments_files_to_explode=`find * -maxdepth 0 -type f -regextype sed -regex ".*[jew]ar"`

if [ ! -z "$root_deployments_files_to_explode" ]; then
    $scripts_folder/explode-java-files.sh --files `join_by , ${root_deployments_files_to_explode}` # webdeskmapreport, etc.
fi

for f in ${non_root_deployments_files_to_explode[@]}; do
    f_prefix=`remove_suffix "$f"`
    file_path=`find * -maxdepth 1 -type f -name "$f_prefix*.jar"`

    non_root_deployments_files_to_explode_full_path+=("$file_path")
done

if [ ! -z "$non_root_deployments_files_to_explode_full_path" ]; then
    $scripts_folder/explode-java-files.sh --files `join_by , ${non_root_deployments_files_to_explode_full_path[@]}` # **/cpqd-geo-renderer-ejb, etc.
fi
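
# Finally, copy the recompiled .class files from the repository's target/ folders over the
# matching classes inside the exploded deployments.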
substring_path_cons="br/com/cpqd"

for f in ${dot_java_files[@]}; do
    class_file=${f%".java"}.class

    class_file_paths=`find * -type f -name "$class_file"`

    class_path_in_target=''
    for p in ${class_file_paths}; do
        substring_path=$substring_path_cons`get_substring_after_string "$p" "$substring_path_cons"`
        class_path_in_target=`find $repo_dir -type f -wholename "**/${substring_path}"`
    done

    for p in ${class_file_paths[@]}; do
        cp --force $class_path_in_target `dirname $p`
    done
done