# bash-refine/openrefine-bash-curl.sh
# (gist page metadata: 495 lines, 14 KiB, Bash — kept as comments so the file
# remains runnable; the actual script starts at the shebang below)
#!/bin/bash
# openrefine-bash-curl.sh, Felix Lohmeier, v0.2, 2020-07-03
# How to control OpenRefine 3.3+ with cURL (and jq) in Bash scripts
# https://gist.github.com/felixlohmeier/d76bd27fbc4b8ab6d683822cdf61f81d
# tested on Linux (Fedora 33), needs to be adapted to work on macOS
# TODO: example for engine config (facets)
# make script executable from another directory
cd "$(dirname "${0}")" || exit 1
# ============================= CONFIG ======================================= #
# port the local OpenRefine server will listen on; endpoint is derived from it
port="3333"
endpoint="http://localhost:${port}"
# maximum java heap space handed to OpenRefine (refine -m)
memory="1400M"
# run timestamp; doubles as workspace directory name and log file basename
date="$(date +%Y%m%d_%H%M%S)"
workspace="${date}"
# ========================== REQUIREMENTS #=================================== #
# check requirement java (OpenRefine is a java application)
java="$(command -v java 2> /dev/null)"
if [[ -z "${java}" ]] ; then
  echo 1>&2 "ERROR: OpenRefine requires JAVA runtime environment (jre)" \
    "https://openjdk.java.net/install/"
  exit 1
fi
# check requirement cURL (used for every OpenRefine API call below)
curl="$(command -v curl 2> /dev/null)"
if [[ -z "${curl}" ]] ; then
  echo 1>&2 "ERROR: This shell script requires cURL" \
    "https://curl.haxx.se/download.html"
  exit 1
fi
# install jq 1.4 (faster startup time than 1.5 and 1.6) in this directory
# -f makes curl fail on HTTP errors so we never chmod an HTML error page,
# and a failed/partial download is removed before aborting
if [[ ! -f "jq" ]]; then
  echo "Download jq..."
  curl -fL --output "jq" \
    "https://github.com/stedolan/jq/releases/download/jq-1.4/jq-linux-x86_64" \
    || { echo 1>&2 "ERROR: download of jq failed!"; rm -f "jq"; exit 1; }
  chmod +x "jq"
  echo
fi
# absolute path so jq keeps working after later cd/workspace changes
jq="$(readlink -f jq)"
# install OpenRefine 3.3 in subdirectory openrefine
openrefine_url="https://github.com/OpenRefine/OpenRefine/releases/download/3.3/openrefine-linux-3.3.tar.gz"
if [[ ! -d "openrefine" ]]; then
  echo "Download OpenRefine..."
  mkdir -p "openrefine"
  # -f makes curl fail on HTTP errors instead of saving an error page;
  # abort on failed download or extraction instead of continuing blindly
  curl -fL --output "$(basename "${openrefine_url}")" "${openrefine_url}" \
    || { echo 1>&2 "ERROR: download of OpenRefine failed!"; exit 1; }
  echo "Install OpenRefine in subdirectory openrefine..."
  tar -xzf "$(basename "${openrefine_url}")" -C openrefine --strip 1 --totals \
    || { echo 1>&2 "ERROR: extracting OpenRefine failed!"; exit 1; }
  rm -f "$(basename "${openrefine_url}")"
  # do not try to open OpenRefine in browser
  sed -i '$ a JAVA_OPTIONS=-Drefine.headless=true' \
    openrefine/refine.ini
  # set autosave period from 5 minutes to 25 hours
  sed -i 's/#REFINE_AUTOSAVE_PERIOD=60/REFINE_AUTOSAVE_PERIOD=1500/' \
    openrefine/refine.ini
  # set min java heap space to allocated memory
  # (the $ signs are literal text matched inside the refine launcher script)
  sed -i 's/-Xms$REFINE_MIN_MEMORY/-Xms$REFINE_MEMORY/' \
    openrefine/refine
  echo
fi
openrefine="$(readlink -f openrefine/refine)"
# ============================ ENVIRONMENT =================================== #
# log: print a message to stdout, prefixed with a millisecond timestamp and a
# "[ client]" tag (distinguishes script output from OpenRefine server output)
function log() {
printf '%s [ client] %s\n' "$(date +%H:%M:%S.%3N)" "$1"
}
# start the OpenRefine server in the background and wait (max 30s) until its
# start page is reachable; stores the server pid in the global pid_server
function start() {
# quoted in case the install path contains spaces
"${openrefine}" -v warn -m "${memory}" -p "${port}" -d "${workspace}" &
pid_server=${!}
# poll the endpoint once per second; grep -q alone is sufficient here
# (removed useless "| cat" and the -o flag whose output -q discards)
timeout 30s bash -c "until curl -s \"${endpoint}\" \
| grep -q 'OpenRefine' ; do sleep 1; done" \
|| { echo 1>&2 "ERROR: starting OpenRefine server failed!"; stop; exit 1; }
}
# shut down the OpenRefine server, print its resource usage and fail the
# script if the server log contains exceptions or errors
function stop() {
echo
# print system resources used by the server process
ps -o start,etime,%mem,%cpu,rss -p "${pid_server}"
echo
# graceful SIGTERM so OpenRefine can save its projects on shutdown;
# SIGKILL (kill -9) would prevent saving OpenRefine projects
{ kill "${pid_server}" && wait "${pid_server}"; } 2>/dev/null
# grep log for server exceptions
if grep -i 'exception\|error' "${workspace}/${date}.log"; then
exit 1
else
log "no warnings, all good!"
fi
}
# clean shutdown (including log check) on interrupt/termination signals
trap "stop;exit 1" SIGHUP SIGINT SIGQUIT SIGTERM
# fetch a fresh CSRF token (required by OpenRefine 3.3+ for modifying
# commands) and print it to stdout; aborts the whole script on failure
function csrf() {
response=$(curl -fsS "${endpoint}/command/core/get-csrf-token")
if [[ "${response}" == '{"token":"'* ]]; then
# token is the 4th double-quote-delimited field of {"token":"..."}
echo "$response" | cut -d '"' -f 4
else
echo 1>&2 "ERROR: getting CSRF token failed!"
stop
exit 1
fi
}
# extract the numeric project id from a create-project redirect url ($1) and
# store it in the global array p under the current ${project} key;
# aborts the script if the id does not look like a 13-digit project id
function import() {
p[$project]="$(cut -d '=' -f 2 <<< "$1")"
if [[ "${#p[$project]}" == 13 ]]; then
log "loaded as project id ${p[$project]}"
else
# error handling: print the server response and exit if import failed
echo 1>&2 "$1"
stop
exit 1
fi
}
# create workspace
mkdir -p "${workspace}"
# simple logging: mirror all stdout/stderr of this script into a log file
# (the same file stop() greps for server exceptions)
exec &> >(tee -a "${workspace}/${date}.log")
# declare associative array for projects (project name -> OpenRefine id)
declare -A p
# =================== TEMPLATES FOR YOUR WORKFLOW ============================ #
# -------------------------- START SERVER ------------------------------------ #
echo "start OpenRefine server..."
start
echo
# ------------------------- IMPORT OPTION 1 ---------------------------------- #
# create project from heredoc
project="example1" # project id will be accessible as ${p[example1]}
echo "import ${project}..."
# --write-out prints the redirect url, which carries the new project id;
# import() extracts it into p[example1]. The quoted delimiter ("DATA") keeps
# $, \ and quotes in the uploaded data literal (no shell expansion).
import "$(curl -fsS --write-out "%{redirect_url}\n" \
--form project-file="@-;filename=example1.csv" \
--form project-name="${project}" \
--form format="text/line-based/*sv" \
--form options='{"separator": " "}' \
"${endpoint}/command/core/create-project-from-upload?csrf_token=$(csrf)" \
<< "DATA"
a b c
1 2 3
0 0 0
$ \ '
DATA
)"
echo
# -------------------------- IMPORT OPTION 2 --------------------------------- #
# mockup test data
cat << DATA > "${workspace}/test.csv"
z,x,y
3,2,1
0,0,0
DATA
# create project from file
project="example2" # project id will be accessible as ${p[example2]}
echo "import ${project} from file..."
# same call as option 1, but the upload body comes from a file on disk (@file)
import "$(curl -fsS --write-out "%{redirect_url}\n" \
--form project-file="@${workspace}/test.csv" \
--form project-name="${project}" \
--form format="text/line-based/*sv" \
--form options='{"separator": ","}' \
"${endpoint}/command/core/create-project-from-upload?csrf_token=$(csrf)")"
echo
# ------------------------ TRANSFORM OPTION 1 -------------------------------- #
# mockup test data: a one-entry OpenRefine operations history (JSON array)
cat << DATA > "${workspace}/test.json"
[
{
"op": "core/column-addition",
"engineConfig": {
"mode": "row-based"
},
"newColumnName": "test",
"columnInsertIndex": 2,
"baseColumnName": "b",
"expression": "grel:value.replace('2','FILE')",
"onError": "set-to-blank"
}
]
DATA
# apply operation from file
# --data-urlencode operations@file posts the JSON as the "operations" field
echo "add column test..."
curl -fsS \
--data project="${p[example1]}" \
--data-urlencode operations@"${workspace}/test.json" \
"${endpoint}/command/core/apply-operations?csrf_token=$(csrf)" \
|| { stop; exit 1; }
echo; echo
# ------------------------ TRANSFORM OPTION 2 -------------------------------- #
# apply operation from quoted heredoc
# the quoted delimiter ("JSON") prevents any shell expansion in the payload
echo "add column test2..."
curl -fsS \
--data project="${p[example1]}" \
--data-urlencode "operations@-" \
"${endpoint}/command/core/apply-operations?csrf_token=$(csrf)" \
<< "JSON" || { stop; exit 1; }
[
{
"op": "core/column-addition",
"engineConfig": {
"mode": "row-based"
},
"newColumnName": "test2",
"columnInsertIndex": 2,
"baseColumnName": "b",
"expression": "grel:value.replace('2','FOO')",
"onError": "set-to-blank"
}
]
JSON
echo; echo
# ------------------------ TRANSFORM OPTION 3 -------------------------------- #
# apply operation from unquoted heredoc (allows using bash variables)
# the unquoted JSON delimiter lets ${new_column} etc. expand inside the payload
echo "add column test3..."
new_column="test3"
base_column="b"
replace_value="BAR"
curl -fsS \
--data project="${p[example1]}" \
--data-urlencode "operations@-" \
"${endpoint}/command/core/apply-operations?csrf_token=$(csrf)" \
<< JSON || { stop; exit 1; }
[
{
"op": "core/column-addition",
"engineConfig": {
"mode": "row-based"
},
"newColumnName": "${new_column}",
"columnInsertIndex": 3,
"baseColumnName": "${base_column}",
"expression": "grel:value.replace('2','${replace_value}')",
"onError": "set-to-blank"
}
]
JSON
echo; echo
# ------------------------ TRANSFORM OPTION 4 -------------------------------- #
# apply operation from unquoted heredoc with multi-line expression (requires jq)
echo "add column test4..."
replace_value="!"
# read the whole heredoc into ${expression}; read -d '' hits end-of-input and
# returns non-zero, which is harmless here (no set -e in this script)
# NOTE(review): unlike the template reads further below, IFS= is not set, so
# leading/trailing whitespace of the expression is trimmed — confirm intended
read -r -d '' expression << EXPRESSION
grel:value.replace(
'2',
'${replace_value}'
)
EXPRESSION
# jq -s -R encodes the multi-line expression as one JSON string literal,
# which is substituted into the payload via command substitution
curl -fsS \
--data project="${p[example1]}" \
--data-urlencode "operations@-" \
"${endpoint}/command/core/apply-operations?csrf_token=$(csrf)" \
<< JSON || { stop; exit 1; }
[
{
"op": "core/column-addition",
"engineConfig": {
"mode": "row-based"
},
"newColumnName": "test4",
"columnInsertIndex": 4,
"baseColumnName": "b",
"expression": $(echo "${expression}" | ${jq} -s -R '.'),
"onError": "set-to-blank"
}
]
JSON
echo; echo
# ------------------------ TRANSFORM OPTION 5 -------------------------------- #
# apply multiple operations generated on-the-fly (requires jq)
echo "delete columns..."
columns=( "test" "test2" "test3" )
payload=()
# build one single-operation JSON array per column...
for column in "${columns[@]}"; do
payload+=( "$(cat << JSON
[
{
"op": "core/column-removal",
"columnName": "${column}"
}
]
JSON
)" )
done
# ...then merge all arrays into one with jq -s add and post it via stdin
echo "${payload[@]}" | "${jq}" -s add | curl -fsS \
--data project="${p[example1]}" \
--data-urlencode operations@- \
"${endpoint}/command/core/apply-operations?csrf_token=$(csrf)" \
|| { stop; exit 1; }
echo; echo
# -------------------------- EXPORT OPTION 1 --------------------------------- #
# export to stdout
# export-rows streams the project in the requested format; the engine
# parameter applies facets/filters (none here) in row mode
echo "export example1..."
curl -fsS \
--data project="${p[example1]}" \
--data format="tsv" \
--data engine='{"facets":[],"mode":"row-based"}' \
"${endpoint}/command/core/export-rows" \
|| { stop; exit 1; }
echo
# -------------------------- EXPORT OPTION 2 --------------------------------- #
# export to file
output="${workspace}/example1.csv"
echo "export example1..."
# same call as option 1, redirected into ${output}; on curl failure the
# || branch stops the server and exits before the && log line can run
curl -fsS \
--data project="${p[example1]}" \
--data format="csv" \
--data engine='{"facets":[],"mode":"row-based"}' \
"${endpoint}/command/core/export-rows" \
> "${output}" \
|| { stop; exit 1; } \
&& log "saved to file ${output}"
echo
# -------------------------- EXPORT OPTION 3 --------------------------------- #
# templating export to stdout
echo "export example2 using template..."
# read the per-row template (JSON snippet with GREL placeholders) into a
# variable; IFS= with -d '' preserves all whitespace incl. trailing newline
IFS= read -r -d '' template << TEMPLATE
{
"z": {{cells['z'].value.jsonize()}},
"y": {{cells['y'].value.jsonize()}}
}
TEMPLATE
# head -c -2 chops the two trailing newline bytes (from heredoc and echo) so
# no separator follows the last record; prefix/suffix/separator contain
# literal newlines to pretty-print the resulting JSON array
# NOTE(review): negative byte counts are GNU head only (cf. macOS note above)
echo "${template}" | head -c -2 | curl -fsS \
--data project="${p[example2]}" \
--data format="template" \
--data prefix="[
" \
--data suffix="
]" \
--data separator=",
" \
--data engine='{"facets":[],"mode":"row-based"}' \
--data-urlencode template@- \
"${endpoint}/command/core/export-rows" \
|| { stop; exit 1; }
echo; echo
# -------------------------- EXPORT OPTION 4 --------------------------------- #
# templating export to file
output="${workspace}/example2.json"
echo "export example2 using template..."
# same template mechanism as option 3 (see comments there), redirected to file
IFS= read -r -d '' template << TEMPLATE
{
"z": {{cells['z'].value.jsonize()}},
"y": {{cells['y'].value.jsonize()}}
}
TEMPLATE
echo "${template}" | head -c -2 | curl -fsS \
--data project="${p[example2]}" \
--data format="template" \
--data prefix="[
" \
--data suffix="
]" \
--data separator=",
" \
--data engine='{"facets":[],"mode":"row-based"}' \
--data-urlencode template@- \
"${endpoint}/command/core/export-rows" \
> "${output}" \
|| { stop; exit 1; } \
&& log "saved to file ${output}"
echo; echo
# -------------------------- EXPORT OPTION 5 --------------------------------- #
# export projects to files (example for parallel execution)
projects=( "example1" "example2" )
format="tsv"
echo "export ${projects[*]} to files..."
pid=()
# launch one background curl per project and remember its pid
for project in "${projects[@]}"; do
curl -fs \
--data project="${p[$project]}" \
--data format="${format}" \
--data engine='{"facets":[],"mode":"row-based"}' \
"${endpoint}/command/core/export-rows" \
> "${workspace}/${project}.${format}" &
pid+=("$!")
done
# wait on each pid; wait propagates the exit status of the background curl
for i in "${!projects[@]}"; do
wait "${pid[$i]}" \
|| { echo 1>&2 "ERROR: export of ${projects[$i]} failed!"; stop; exit 1; } \
&& log "${projects[$i]} saved to file ${workspace}/${projects[$i]}.${format}"
done
echo
# -------------------------- LIST PROJECTS ----------------------------------- #
# print id and name for each project (requires jq)
# note: --get turns the --data fields of the calls below into url query params
echo "list projects..."
curl -fsS --get \
"${endpoint}/command/core/get-all-project-metadata" \
| "${jq}" -r '.projects | keys[] as $k | "\($k): \(.[$k] | .name)"' \
|| { stop; exit 1; }
echo
# -------------------------- GET METADATA ------------------------------------ #
# print metadata (requires jq); jq prepends the project id to the metadata
echo "metadata for project example1..."
curl -fsS --get \
--data project="${p[example1]}" \
"${endpoint}/command/core/get-project-metadata" \
| "${jq}" "{ id: ${p[example1]} } + ." \
|| { stop; exit 1; }
echo
# ---------------------------- GET ROWS -------------------------------------- #
# print total number of rows (requires jq)
echo "total number of rows in project example1..."
curl -fsS --get \
--data project="${p[example1]}" \
"${endpoint}/command/core/get-rows" \
| "${jq}" -r '.total' \
|| { stop; exit 1; }
echo
# -------------------------- GET COLUMNS ------------------------------------- #
# print columns (requires jq): one column name per line from the column model
echo "column names of project example1..."
curl -fsS --get \
--data project="${p[example1]}" \
"${endpoint}/command/core/get-models" \
| "${jq}" -r '.columnModel | .columns[] | .name' \
|| { stop; exit 1; }
echo
# ---------------------- GET OPERATIONS HISTORY ------------------------------ #
# save operations history to file (requires jq)
# jq extracts the reusable operation objects from the undo/redo entries,
# yielding a JSON array that apply-operations can replay on other projects
output="${workspace}/example1_history.json"
echo "operations history for project example1..."
curl -fsS --get \
--data project="${p[example1]}" \
"${endpoint}/command/core/get-operations" \
| "${jq}" '[ .entries[] | .operation ]' \
> "${output}" \
|| { stop; exit 1; } \
&& log "saved to file ${output}"
echo
# ------------------------ GET IMPORT History -------------------------------- #
# print import options history (requires jq): the options used at project
# creation are part of the project metadata
echo "print import options history for project example2..."
curl -fsS --get \
--data project="${p[example2]}" \
"${endpoint}/command/core/get-project-metadata" \
| "${jq}" ".importOptionMetadata[0]" \
|| { stop; exit 1; }
echo
# ------------------------- DELETE project ----------------------------------- #
# delete project (a csrf token is needed, like for all modifying commands)
echo "delete project example1..."
curl -fsS \
--data project="${p[example1]}" \
"${endpoint}/command/core/delete-project?csrf_token=$(csrf)" \
|| { stop; exit 1; }
echo; echo
# --------------------------- STOP SERVER ------------------------------------ #
# shut down the server; stop() also greps the run log for exceptions
echo "stop OpenRefine server..."
stop
echo