Diffstat (limited to 'misc/offline-sync')
-rwxr-xr-x  misc/offline-sync | 196
1 file changed, 196 insertions(+), 0 deletions(-)
diff --git a/misc/offline-sync b/misc/offline-sync
new file mode 100755
index 0000000..b743788
--- /dev/null
+++ b/misc/offline-sync
@@ -0,0 +1,196 @@
+#!/bin/sh
+
+# Copyright (C) 2012 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
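+# Run with V=1 to trace every executed command (see abort() below).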
+[ "${V}" ] && set -x
+
+info() {
+ echo "-- $@"
+}
+
+phase() {
+ echo " $@"
+}
+
+warning() {
+ echo "-- $@"
+}
+
+abort() {
+ rm -f *.sync
+ echo "-- $@"
+ echo " run \"V=1 $0 ${cmdline}\" to get full debugging output"
+ exit 1
+}
+
+help() {
+ cat <<EOF
+Usage: $(basename $0) [PATH] [URL]
+
+Sugar Network sneakernet synchronization utility.
+
+Command arguments:
+  PATH   if specified, the utility recursively searches PATH for
+         synchronization packet files (files with a ".packet" suffix);
+         each packet is uploaded to the target Sugar Network server
+         using wget or curl, and the resulting packets are downloaded;
+         on success, uploaded packets are removed and the resulting
+         packets are placed in PATH instead
+  URL    if specified, should be a Sugar Network API URL, e.g.,
+         http://api-testing.network.sugarlabs.org; the script downloads
+         a full data dump from the server
+
+The utility is intended to upload request packet files (generated by Sugar
+Network node servers) to the Sugar Network master server and to download
+response packets for delivery back to the nodes.
+
+See http://wiki.sugarlabs.org/go/Sugar_Network for details.
+EOF
+}
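+
+# Example invocation (the sync directory /media/sync is only an illustration):
+#
+#   ./offline-sync /media/sync http://api-testing.network.sugarlabs.org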
+
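+# get_hostname URL: print the host part of the URL,
+# e.g., "example.org" for "http://example.org/path".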
+get_hostname() {
+ echo $@ | awk -F/ '{print $3}'
+}
+
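+# get_header_key PACKET KEY: extract the "header" member from the packet
+# tarball and print the value of the given JSON key.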
+get_header_key() {
+ local packet="$1"
+ local key="$2"
+
+ tar -xf "${packet}" header --to-stdout | \
+ grep -o "\"${key}\":[^,}]\+" | \
+ sed 's/^[^:]*://; s/^[ "]*//; s/"$//'
+}
+
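+# upload URL COOKIE [IN_PACKET]: POST IN_PACKET (if given) to URL with wget
+# or curl, reusing COOKIE between requests; a non-empty response is stored
+# under the filename announced in its header, the uploaded packet is removed,
+# and COOKIE is kept only while the server asks to postpone the pull.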
+upload() {
+ local url="$1"
+ local cookie="$2"
+ local in_packet="$3"
+ local out_packet="${in_packet}.sync"
+
+ [ -e "${out_packet}" ] && abort "Destination ${out_packet} already exists"
+
+ if which wget >/dev/null 2>&1; then
+ cmd="wget --output-document=${out_packet} --load-cookies=${cookie} --save-cookies=${cookie}"
+ [ "${in_packet}" ] && cmd="${cmd} --post-file=${in_packet}"
+ [ "${V}" ] && cmd="${cmd} --server-response" || cmd="${cmd} --quiet"
+ else
+ cmd="curl --output ${out_packet} --cookie ${cookie} --cookie-jar ${cookie}"
+ [ "${in_packet}" ] && cmd="${cmd} -XPOST --data-binary @${in_packet}"
+ [ "${V}" ] || cmd="${cmd} --silent"
+ fi
+
+ ${cmd} "${url}" || abort "Cannot run upload command"
+
+ if [ -e "${out_packet}" ]; then
+ if [ $(stat -c %s "${out_packet}") -eq 0 ]; then
+ rm "${out_packet}"
+ else
+ out_filename="$(get_header_key "${out_packet}" filename)"
+ phase "Store results in ${out_filename}"
+ mv "${out_packet}" "${out_filename}" || abort "Cannot write ${out_filename}"
+ fi
+ fi
+
+ if [ -e "${cookie}" ]; then
+ if grep unset_sugar_network_sync "${cookie}" >/dev/null; then
+ rm "${cookie}"
+ elif grep sugar_network_sync "${cookie}" >/dev/null; then
+ phase "Postpone pull with ${cookie}"
+ else
+ rm "${cookie}"
+ fi
+ fi
+
+ if [ "${in_packet}" ]; then
+ phase "Remove uploaded file"
+ rm "${in_packet}"
+ fi
+}
+
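+# pull URL: repeatedly request "?cmd=pull", advertising the free disk space
+# as accept_length, until the server stops postponing the pull (i.e., until
+# the cookie file disappears); honours the delay reported via the
+# sugar_network_delay cookie.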
+pull() {
+ local api_url="$1?cmd=pull"
+ local cookie="$(get_hostname $1).cookie"
+
+ while true; do
+ local disk_free=$(df . --block-size=1 | tail -n1 | awk '{print $4}')
+ [ $disk_free -gt $disk_limit ] || abort \
+ "No free disk space on $PWD, copy all *.cookie files" \
+ "to directory with more free space and re-run $0 from there"
+
+ upload "${api_url}&accept_length=${disk_free}" "${cookie}"
+ [ -e "${cookie}" ] || break
+
+ delay=$(grep -o 'sugar_network_delay[[:space:]]*[0-9]\+' "${cookie}" 2>/dev/null | awk '{print $2}')
+ if [ "${delay}" ]; then
+ phase "Server started processing pull, check for results in ${delay} seconds"
+ sleep ${delay}
+ else
+ phase "Pull postponed updates"
+ fi
+ done
+}
+
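+# Command line: optional sync directory (PATH) and optional master API URL;
+# disk_limit is the minimal free space (10 MiB) required to keep pulling.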
+sync_path="$1"
+clone_url="$2"
+cmdline="$@"
+FS=
+disk_limit=$(expr 1024 \* 1024 \* 10)
+
+if [ $# -eq 0 ]; then
+ if [ -e "$(dirname $0)/.sugar-network-sync" ]; then
+ # Script was launched from sync directory, so, process sync
+ sync_path="$(dirname $0)"
+ else
+ help
+ exit 0
+ fi
+fi
+
+mkdir -p "${sync_path}" || abort "Cannot create ${sync_path} sync directory"
+cd "${sync_path}" || abort "Cannot switch to ${sync_path} sync directory"
+
+mountpoint="$(stat --printf %m .)"
+[ "${mountpoint}" = "$PWD" ] || info "NOTICE To make $PWD capable for further auto synchronization on a node side, place its content to the mount's root"
+touch .sugar-network-sync
+
+if [ "${clone_url}" ]; then
+ info "Clone master"
+ pull "${clone_url}"
+ exit 0
+fi
+
+# Push packets and collect "continue.packet" files to pull afterwards
+for packet in $(find -type f -name '*.packet'); do
+ api_url="$(get_header_key "${packet}" api_url)"
+ if [ -z "${api_url}" ]; then
+ info "Skip ${packet}, it is not intended for uploading"
+ else
+ info "Push ${packet} to ${api_url}"
+ upload "${api_url}?cmd=push" "$(get_hostname ${api_url}).cookie" "${packet}"
+ fi
+done
+
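+# Keep pulling while postponed pulls (*.cookie files) remain.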
+while true; do
+ found=
+ for cookie in $(find -type f -name '*.cookie'); do
+ api_url="http://$(basename "${cookie}" .cookie)"
+ info "Pull updates from ${api_url}"
+ pull "${api_url}"
+ found=1
+ done
+ [ "${found}" ] || break
+done