#!/usr/bin/env bash

set -e
cd "${BASH_SOURCE%/*}" || exit

# Defaults; a local .env file can override any of them (in particular SPACE_TOKEN)
SPACE_DUMPS="."
SPACE_ORG="org.jetbrains.space"
SPACE_TOKEN="<personal-token>"
source .env

if [ -z "$1" ]; then
  echo "You need to specify the issue (like PROJ-T-123)"
  exit 1
fi

ISSUE="$1"
PROJECT=${ISSUE%%-*}                    # PROJ-T-123 -> PROJ
FROM="2000-01-01T00%3A00%3A00.000Z"     # URL-encoded start date for the paged comment query
OUTDIR="$SPACE_DUMPS/$PROJECT/$ISSUE"
COMMENTS_FILE="$OUTDIR/comments.json"
ISSUE_FILE="$OUTDIR/issue.json"
ATTACHMENTS="$OUTDIR/attachments"

mkdir -p "$OUTDIR"

# Store the issue itself
curl -s "https://$SPACE_ORG/api/http/issues?issueId=key:$ISSUE&\$fields=id,assignee(username),attachmentsCount,commentsCount,createdBy(name),creationTime,deletedBy(name),deletedTime,dueDate,number,status(name,resolved,archived),tags(name),title,attachments(details(id,filename,name,deletedIdentity)),customFields,description" \
  -H "Authorization: Bearer $SPACE_TOKEN" \
  -H 'Accept: application/json' | jq . > "$ISSUE_FILE"
echo "Got issue.json for $ISSUE"

# Safety cap on pagination requests (50 batches x 50 messages = 2500 comments max)
MAX_ITERATIONS=50

TEMP_FILE=$(mktemp)

# Initialize an empty array for all messages
echo '[]' > "$TEMP_FILE"

for ((i=0; i<MAX_ITERATIONS; i++)); do
  RESPONSE=$(curl -s "https://$SPACE_ORG/api/http/chats/messages?channel=issue:key:$ISSUE&startFromDate=$FROM&sorting=FromOldestToNewest&batchSize=50&\$fields=nextStartFromDate,messages(id,created,edited,text,time,author(name),details(className),attachments(details(id,filename,name,unfurl(details(tag(name),strikeThrough)))),reactions(emojiReactions(emoji,meReacted,count)))" \
    -H "Authorization: Bearer $SPACE_TOKEN" \
    -H 'Accept: application/json')

  # Extract the messages from the response and append them to the temporary file
  echo "$RESPONSE" | jq '.messages' | jq -s 'add' "$TEMP_FILE" - > "$TEMP_FILE.new" && mv "$TEMP_FILE.new" "$TEMP_FILE"

  LENGTH=$(echo "$RESPONSE" | jq '.messages | length')
  echo "Got $LENGTH messages in query $((i+1))"

  # A batch smaller than 50 means this was the last page
  if [[ "$LENGTH" != 50 ]]; then
    break
  fi

  # Refuse to loop forever if there are more pages than expected
  if [[ $i -eq $((MAX_ITERATIONS-1)) ]]; then
    echo "NOT EXPECTED"
    rm "$TEMP_FILE"
    exit 1
  fi

  # Continue the next query from the timestamp returned by this one
  FROM=$(echo "$RESPONSE" | jq -r '.nextStartFromDate.iso')
done

# Wrap the final array in an object
jq '{ messages: . }' "$TEMP_FILE" > "$COMMENTS_FILE"

rm "$TEMP_FILE"

LENGTH=$(jq '.messages | length' < "$COMMENTS_FILE")
echo "All $LENGTH messages have been concatenated into 'comments.json'"

# Download the issue attachments
LIST=$(jq -r 'select(.attachments | length > 0).attachments[].details | select(.className != "UnfurlAttachment") | [ .className, .id, .name+.filename ] | @csv' < "$ISSUE_FILE")

echo "$LIST" | while IFS=',' read -r TYPE ID NAME
do
  # Strip the quotes that jq's @csv output adds around each field
  TYPE=$(echo "$TYPE" | tr -d '"')
  ID=$(echo "$ID" | tr -d '"')
  NAME=$(echo "$NAME" | tr -d '"')

  # Skip empty lines and deleted attachments
  [ -z "$TYPE" ] && continue
  [ "$TYPE" = "DeletedAttachment" ] && continue

  # Skip attachments that were already downloaded
  if [ -f "$ATTACHMENTS/$ID/$NAME" ]; then
    echo "Skipping existing \"$NAME\" ($ID / $TYPE)"
    continue
  fi

  mkdir -p "$ATTACHMENTS/$ID"
  echo "Downloading \"$NAME\" ($ID / $TYPE)"
  curl -s -L "https://$SPACE_ORG/d/$ID?f=0&download=true" -H "Authorization: Bearer $SPACE_TOKEN" -o "$ATTACHMENTS/$ID/$NAME"
done

# Download the comment attachments
LIST=$(jq -r '.messages[] | select(.attachments | length > 0).attachments[].details | select(.className != "UnfurlAttachment") | [ .className, .id, .name+.filename ] | @csv' < "$COMMENTS_FILE")

echo "$LIST" | while IFS=',' read -r TYPE ID NAME
do
  # Strip the quotes that jq's @csv output adds around each field
  TYPE=$(echo "$TYPE" | tr -d '"')
  ID=$(echo "$ID" | tr -d '"')
  NAME=$(echo "$NAME" | tr -d '"')

  # Skip empty lines and deleted attachments
  [ -z "$TYPE" ] && continue
  [ "$TYPE" = "DeletedAttachment" ] && continue

  # Skip attachments that were already downloaded
  if [ -f "$ATTACHMENTS/$ID/$NAME" ]; then
    echo "Skipping existing \"$NAME\" ($ID / $TYPE)"
    continue
  fi

  mkdir -p "$ATTACHMENTS/$ID"
  echo "Downloading \"$NAME\" ($ID / $TYPE)"
  curl -s -L "https://$SPACE_ORG/d/$ID?f=0&download=true" -H "Authorization: Bearer $SPACE_TOKEN" -o "$ATTACHMENTS/$ID/$NAME"
done

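Assuming the script is saved as, say, dump-issue.sh (no name is given above) and a .env file next to it provides your Space hostname and personal token, it is called once per issue:

./dump-issue.sh PROJ-T-123

Each run writes issue.json, comments.json and an attachments/ folder under <project>/<issue> inside SPACE_DUMPS.
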
I was lucky that our individual projects were all "just below" that limit. I guess you can add something like

$skip=1000

in line 23. It would be best to make this an argument to the script and call it manually several times: each run would then skip the first 1000 issues, grab the next 1000, and, since issues are downloaded individually, add them to the ones already on disk. A sketch of that idea follows.

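A minimal sketch of that suggestion, assuming nothing beyond the comment itself: the bulk-export script it refers to is not shown above, so the listing URL below is only a placeholder, $skip is the parameter named in the comment, and the matching $top=1000 batch size is an assumption.

#!/usr/bin/env bash
# Hypothetical sketch only: substitute the real issue-listing URL from the
# bulk-export script for the "..." placeholder below.
set -e
source .env   # assumes the same SPACE_ORG / SPACE_TOKEN setup as in the script above

SKIP="${1:-0}"   # offset for each manual run: 0, 1000, 2000, ...

# Appending $skip (plus a $top batch size) makes each run fetch the next slice;
# issues already dumped to disk by earlier runs are left in place.
curl -s "https://$SPACE_ORG/api/http/...?\$top=1000&\$skip=$SKIP" \
  -H "Authorization: Bearer $SPACE_TOKEN" \
  -H 'Accept: application/json'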