# https://taskfile.dev
---
version: '3'

# Prefix task output with the task name so parallel runs stay readable.
output: prefixed

# Per-data-source task files; their tasks are referenced as <name>:<task>.
includes:
  muenster: ./tasks/muenster.yml
  siegen: ./tasks/siegen.yml
  wuppertal: ./tasks/wuppertal.yml

vars:
  # Current date (YYYY-MM-DD), used to timestamp generated ZIP/diff artifacts.
  DATE: '{{ now | date "2006-01-02"}}'

env:
  # Absolute paths to the OpenRefine binaries, resolved once at startup.
  OPENREFINE:
    sh: readlink -e openrefine/refine
  OPENREFINE_CLIENT:
    sh: readlink -e openrefine/openrefine-client
tasks:
  # Run all data sources in parallel (Task executes deps concurrently)
  # after verifying that every external tool requirement is met.
  default:
    desc: alle Datenquellen (parallel)
    preconditions:
      - sh: test -n "$(command -v metha-sync)"
        msg: "requirement metha missing"
      - sh: test -n "$(command -v java)"
        msg: "requirement JAVA runtime environment (jre) missing"
      - sh: test -x "$OPENREFINE"
        msg: "requirement OpenRefine missing"
      - sh: test -x "$OPENREFINE_CLIENT"
        msg: "requirement openrefine-client missing"
      - sh: test -n "$(command -v curl)"
        msg: "requirement curl missing"
      - sh: test -n "$(command -v xmllint)"
        msg: "requirement xmllint missing"
    deps:
      - task: muenster:default
      - task: wuppertal:default
      - task: siegen:default
openrefine-start:
|
|
label: '{{.TASK}}-{{.PROJECT}}'
|
|
dir: data/{{.PROJECT}}/refine
|
|
cmds:
|
|
- test -n "{{.PROJECT}}"; test -n "{{.PORT}}"; test -n "{{.RAM}}"
|
|
# Temporäre Dateien löschen
|
|
- rm -rf ./*.project* && rm -f workspace.json
|
|
# OpenRefine starten und Logdatei schreiben für spätere checks
|
|
- $OPENREFINE -v warn -p {{.PORT}} -m {{.RAM}} -d $PWD > openrefine.log 2>&1 &
|
|
# Warten bis OpenRefine erreichbar ist
|
|
- timeout 30s bash -c "until curl -s http://localhost:{{.PORT}} | cat | grep -q -o OpenRefine ; do sleep 1; done"
|
|
|
|
openrefine-stop:
|
|
label: '{{.TASK}}-{{.PROJECT}}'
|
|
dir: data/{{.PROJECT}}/refine
|
|
cmds:
|
|
- test -n "{{.PROJECT}}"; test -n "{{.PORT}}"
|
|
# Statistik zu Laufzeit und Ressourcenverbrauch
|
|
- ps -o start,etime,%mem,%cpu,rss -p $(lsof -t -i:{{.PORT}})
|
|
# OpenRefine herunterfahren
|
|
- PID=$(lsof -t -i:{{.PORT}}); kill $PID; while ps -p $PID > /dev/null; do sleep 1; done
|
|
# OpenRefine-Projekt für Debugging archivieren
|
|
- tar cfz {{.PROJECT}}.openrefine.tar.gz -C $(grep -l {{.PROJECT}} *.project/metadata.json | cut -d '/' -f 1) .
|
|
|
|
check:
|
|
label: '{{.TASK}}-{{.PROJECT}}'
|
|
dir: data/{{.PROJECT}}/refine
|
|
cmds:
|
|
- test -n "{{.PROJECT}}"; test -n "{{.MINIMUM}}"
|
|
# Logdatei von OpenRefine auf Warnungen und Fehlermeldungen prüfen
|
|
- if grep -i 'exception\|error' openrefine.log; then echo 1>&2 "Logdatei $PWD/openrefine.log enthält Warnungen!" && exit 1; fi
|
|
# Prüfen, ob Mindestanzahl von 1250 Datensätzen generiert wurde
|
|
- if (( {{.MINIMUM}} > $(grep -c recordIdentifier {{.PROJECT}}.txt) )); then echo 1>&2 "Unerwartet geringe Anzahl an Datensätzen in $PWD/{{.PROJECT}}.txt!" && exit 1; fi
|
|
sources:
|
|
- openrefine.log
|
|
- '{{.PROJECT}}.txt'
|
|
|
|
split:
|
|
label: '{{.TASK}}-{{.PROJECT}}'
|
|
dir: data/{{.PROJECT}}/split
|
|
cmds:
|
|
- test -n "{{.PROJECT}}"
|
|
# in Einzeldateien aufteilen
|
|
- csplit -s -z ../refine/{{.PROJECT}}.txt '/<mets:mets /' "{*}"
|
|
# ggf. vorhandene XML-Dateien löschen
|
|
- rm -f *.xml
|
|
# Identifier als Dateinamen
|
|
- for f in xx*; do mv "$f" "$(xmllint --xpath "//*[local-name(.) = 'recordIdentifier']/text()" "$f").xml"; done
|
|
sources:
|
|
- ../refine/{{.PROJECT}}.txt
|
|
generates:
|
|
- ./*.xml
|
|
|
|
validate:
|
|
label: '{{.TASK}}-{{.PROJECT}}'
|
|
dir: data/{{.PROJECT}}
|
|
cmds:
|
|
- test -n "{{.PROJECT}}"
|
|
# Validierung gegen METS Schema
|
|
- wget -q -nc https://www.loc.gov/standards/mets/mets.xsd
|
|
- xmllint --schema mets.xsd --noout split/*.xml > validate.log 2>&1
|
|
sources:
|
|
- split/*.xml
|
|
generates:
|
|
- validate.log
|
|
|
|
zip:
|
|
label: '{{.TASK}}-{{.PROJECT}}'
|
|
dir: data/{{.PROJECT}}
|
|
cmds:
|
|
- test -n "{{.PROJECT}}"
|
|
# ZIP-Archiv mit Zeitstempel erstellen
|
|
- zip -q -FS -j {{.PROJECT}}_{{.DATE}}.zip split/*.xml
|
|
sources:
|
|
- split/*.xml
|
|
generates:
|
|
- '{{.PROJECT}}_{{.DATE}}.zip'
|
|
|
|
diff:
|
|
label: '{{.TASK}}-{{.PROJECT}}'
|
|
dir: data/{{.PROJECT}}
|
|
cmds:
|
|
- test -n "{{.PROJECT}}"
|
|
# Inhalt der beiden letzten ZIP-Archive vergleichen
|
|
- if test -n "$(ls -t *.zip | sed -n 2p)"; then unzip -q -d old $(ls -t *.zip | sed -n 2p); unzip -q -d new $(ls -t *.zip | sed -n 1p); fi
|
|
- diff -d old new > diff.log || exit 0
|
|
- rm -rf old new
|
|
# Diff prüfen, ob es weniger als 500 Zeilen enthält
|
|
- if (( 500 < $(wc -l <diff.log) )); then echo 1>&2 "Unerwartet große Änderungen in $PWD/diff.log!" && exit 1; fi
|
|
# Diff archivieren
|
|
- cp diff.log {{.PROJECT}}_{{.DATE}}.diff
|
|
sources:
|
|
- split/*.xml
|
|
generates:
|
|
- diff.log
|
|
|
|
linkcheck:
|
|
label: '{{.TASK}}-{{.PROJECT}}'
|
|
dir: data/{{.PROJECT}}
|
|
cmds:
|
|
- test -n "{{.PROJECT}}"
|
|
# Links extrahieren
|
|
- xmllint --xpath '//@*[local-name(.) = "href"]' split/*.xml | cut -d '"' -f2 > links.txt
|
|
# http status code aller Links ermitteln
|
|
- curl --silent --head --write-out "%{http_code} %{url_effective}\n" $(while read line; do echo "-o /dev/null $line"; done < links.txt) > linkcheck.log
|
|
- rm -rf links.txt
|
|
# Logdatei auf status code != 2XX prüfen
|
|
- if grep '^[^2]' linkcheck.log; then echo 1>&2 "Logdatei $PWD/linkcheck.log enthält problematische status codes!" && exit 1; fi
|
|
sources:
|
|
- split/*.xml
|
|
generates:
|
|
- linkcheck.log
|
|
|
|
delete:
|
|
label: '{{.TASK}}-{{.PROJECT}}'
|
|
dir: data/{{.PROJECT}}
|
|
cmds:
|
|
- test -n "{{.PROJECT}}"
|
|
- rm -rf harvest
|
|
- rm -rf refine
|
|
- rm -rf split
|