#!/bin/bash
# twtxt.sh - A simple script to manage a Twtxt v2 feed with following capabilities

FEED_FILE="twtxt.txt"
CONFIG_DIR="$HOME/.config/twtxt"
FOLLOWING_FILE="$CONFIG_DIR/following.txt"
FEEDS_DIR="$CONFIG_DIR/feeds"
TIMELINE_FILE="$CONFIG_DIR/timeline.txt"
FEED_URLS_FILE="$CONFIG_DIR/feed_urls.txt"

# Create configuration directories if they don't exist
mkdir -p "$CONFIG_DIR"
mkdir -p "$FEEDS_DIR"

# Initialize the feed file if it doesn't exist
if [ ! -f "$FEED_FILE" ]; then
    echo "# nick: ${USER}" >"$FEED_FILE"
    echo "# url: https://yourdomain.com/$FEED_FILE" >>"$FEED_FILE"
    echo "" >>"$FEED_FILE"
fi

# Get the URL of a feed from its metadata header
#
# $1 - path to the feed file
get_feed_url() {
    grep -m1 -E "^#\s*url\s*[:=]\s*" "$1" | tail -1 | sed 's/^#\s*url\s*[:=]\s*//'
}

# Get the nick of a feed from its metadata header
#
# $1 - path to the feed file
get_feed_nick() {
    grep -m1 -E "^#\s*nick\s*[:=]\s*" "$1" | tail -1 | sed 's/^#\s*nick\s*[:=]\s*//'
}

# Check if GNU date is available
#
# GNU date is needed to support the -d option for parsing dates in any format.
# Without GNU date, the script can only parse timestamps in the default
# ISO 8601 format.
check_gnu_date() {
    if date --version >/dev/null 2>&1; then
        DATE_IS_GNU=1
    else
        DATE_IS_GNU=0
    fi
}

# Convert a timestamp in ISO 8601 format to a UNIX timestamp
#
# $1 - timestamp in ISO 8601 format (YYYY-MM-DDTHH:MM:SSZ)
#
# Returns the timestamp as a UNIX timestamp (seconds since Jan 1, 1970, 00:00:00 UTC)
timestamp_to_unix() {
    local timestamp="$1"
    # Remove fractional seconds if present
    timestamp=$(echo "$timestamp" | sed -E 's/\.[0-9]+//')
    if [ "$DATE_IS_GNU" -eq 1 ]; then
        # GNU date
        date -u -d "$timestamp" +"%s" 2>/dev/null
    else
        # BSD date (macOS)
        date -u -j -f "%Y-%m-%dT%H:%M:%SZ" "$timestamp" +"%s" 2>/dev/null
    fi
}

# Calculate the Twt Hash for a given feed URL, timestamp, and content
#
# $1 - feed URL
# $2 - timestamp
# $3 - content
#
# Returns the first 11 characters of the SHA-256 hash of the three
# components concatenated with newline separators.
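#
# Example (illustrative values, not from a real feed): for content without
# backslash escape sequences, the following pipeline produces the same result
# as calling calculate_hash with these three arguments:
#
#   printf '%s\n%s\n%s\n' "https://example.com/twtxt.txt" \
#       "2024-01-01T12:00:00Z" "Hello, twtxt!" | sha256sum | cut -c1-11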
calculate_hash() {
    local feed_url timestamp content data hash
    feed_url="$1"
    timestamp="$2"
    content="$3"
    # Concatenate components with newline separators
    data="${feed_url}\n${timestamp}\n${content}"
    # Calculate the SHA-256 hash and keep the first 11 characters
    hash=$(echo -e "$data" | sha256sum | awk '{print $1}' | cut -c1-11)
    echo "$hash"
}

# Post a new twt to the feed file
#
# $1 - the content of the twt
#
# Appends the twt to the feed file and calculates the Twt Hash
post_twt() {
    local content timestamp feed_url hash
    content="$1"
    timestamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
    feed_url=$(get_feed_url "$FEED_FILE")
    # Append the twt to the feed file
    echo -e "${timestamp}\t${content}" >>"$FEED_FILE"
    # Calculate and display the Twt Hash
    hash=$(calculate_hash "$feed_url" "$timestamp" "$content")
    echo "Posted twt with hash: $hash"
}

# Reply to a twt
#
# $1 - the hash of the twt to reply to
# $2 - the content of the reply
#
# Appends the reply to the feed file and calculates the Twt Hash
reply_twt() {
    local reply_to_hash="$1"
    shift
    local content="$* (reply-to:${reply_to_hash})"
    post_twt "$content"
}

# Edit a twt
#
# $1 - the hash of the twt to edit
# $2 - the new content of the twt
#
# Appends the edited twt to the feed file and calculates the Twt Hash
edit_twt() {
    local edit_hash="$1"
    shift
    local content="$* (edit:${edit_hash})"
    post_twt "$content"
}

# Delete a twt
#
# $1 - the hash of the twt to delete
#
# Posts a special twt with the content "(delete:<hash>)" to the feed file,
# which marks the twt with the given hash as deleted when processing the feed.
delete_twt() {
    local delete_hash="$1"
    local content="(delete:${delete_hash})"
    post_twt "$content"
}

# Calculate the hash of a twt in a feed file by line number
#
# $1 - line number of the twt
# $2 - path to the feed file
#
# Extracts the timestamp and content of the twt from the feed file,
# calculates the hash using the feed URL and the extracted timestamp
# and content, and returns the hash.
calculate_twt_hash_by_line() {
    local line_number feed_file feed_url line_content timestamp content
    line_number="$1"
    feed_file="$2"
    feed_url=$(get_feed_url "$feed_file")
    line_content=$(sed -n "${line_number}p" "$feed_file")
    timestamp=$(echo "$line_content" | cut -f1)
    content=$(echo "$line_content" | cut -f2-)
    calculate_hash "$feed_url" "$timestamp" "$content"
}

# Follow a new feed
#
# $1 - URL of the feed to follow
#
# If the feed is already being followed, a message saying so is printed.
# Otherwise, the URL is appended to the following file and a confirmation
# is printed.
follow_feed() {
    local feed_url="$1"
    # Check if the feed is already being followed
    if grep -Fxq "$feed_url" "$FOLLOWING_FILE" 2>/dev/null; then
        echo "You are already following $feed_url"
    else
        echo "$feed_url" >>"$FOLLOWING_FILE"
        echo "Started following $feed_url"
    fi
}

# Unfollow a feed
#
# $1 - URL of the feed to unfollow
#
# If the feed is being followed, the URL is removed from the following file
# and a confirmation is printed. Otherwise, a message is printed saying that
# you are not following the feed.
unfollow_feed() {
    local feed_url="$1"
    if grep -Fxq "$feed_url" "$FOLLOWING_FILE" 2>/dev/null; then
        grep -Fxv "$feed_url" "$FOLLOWING_FILE" >"${FOLLOWING_FILE}.tmp"
        mv "${FOLLOWING_FILE}.tmp" "$FOLLOWING_FILE"
        echo "Stopped following $feed_url"
    else
        echo "You are not following $feed_url"
    fi
}
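
# Example feed entries produced by post_twt, reply_twt, and delete_twt, one per
# line in the form "<timestamp><TAB><content>"; the 11-character hash
# "0123456789a" is an illustrative placeholder:
#
#   2024-01-01T12:00:00Z	Hello, twtxt!
#   2024-01-01T12:05:00Z	Nice to meet you (reply-to:0123456789a)
#   2024-01-01T12:10:00Z	(delete:0123456789a)
#
# The following file ($FOLLOWING_FILE) is simply one feed URL per line.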

# Fetch all feeds listed in $FOLLOWING_FILE and store them in $FEEDS_DIR
#
# For each feed, fetch the content using curl and store it in a file
# with the same name as the feed URL, but with all non-alphanumeric
# characters replaced with underscores. The mapping between the feed
# file, feed URL, and nick is recorded in $FEED_URLS_FILE. If the feed
# does not contain a "# url:" line, one is prepended to the feed file.
fetch_feeds() {
    if [ ! -f "$FOLLOWING_FILE" ]; then
        echo "You are not following any feeds."
        exit 1
    fi

    # Create or clear the mapping file
    echo -n >"$FEED_URLS_FILE"

    while read -r feed_url; do
        feed_filename=${feed_url//[^a-zA-Z0-9]/_}
        feed_path="$FEEDS_DIR/${feed_filename}.txt"
        echo "Fetching $feed_url"
        if curl -s -f -A "twtxt.sh/1.0 (+$feed_url; @${USER})" "$feed_url" -o "$feed_path"; then
            # Extract the nick from the feed
            feed_nick="$(get_feed_nick "$feed_path")"
            if [ -z "$feed_nick" ]; then
                feed_nick="unknown"
            fi
            # Record the mapping between feed file, feed URL, and nick
            echo "${feed_filename}.txt|${feed_url}|${feed_nick}" >>"$FEED_URLS_FILE"
            # Check if the feed contains a "# url:" line
            if ! grep -q "^# url:" "$feed_path"; then
                # Prepend the "# url:" line to the feed file
                sed -i "1i# url: $feed_url" "$feed_path"
            fi
        else
            echo "Failed to fetch $feed_url"
        fi
    done <"$FOLLOWING_FILE"
}

# Display your timeline: a combined feed of your own twts and the twts from
# the feeds you are following. The combined feed is sorted chronologically
# and the specified number of most recent twts is displayed. If no number is
# specified, 20 twts are displayed.
#
# $1 - number of twts to display (default: 20)
display_timeline() {
    local num_twts="$1"
    num_twts="${num_twts:-20}" # Default to 20 if not specified

    # Remove any existing combined feed
    combined_feed="$CONFIG_DIR/combined_feed.txt"
    rm -f "$combined_feed"

    # Check if GNU date is available
    check_gnu_date

    # Process your own feed
    process_feed "$FEED_FILE" "$combined_feed" "local"

    # Process followed feeds
    if [ -d "$FEEDS_DIR" ]; then
        for feed_file in "$FEEDS_DIR"/*.txt; do
            [ -e "$feed_file" ] || continue
            process_feed "$feed_file" "$combined_feed"
        done
    fi

    # Sort by UNIX timestamp and keep the most recent twts
    sort -n -k 1 "$combined_feed" | tail -n "$num_twts" >"$TIMELINE_FILE"

    # Display the timeline with ANSI colors
    while IFS=$'\t' read -r unix_timestamp display_line; do
        echo -e "$display_line"
    done <"$TIMELINE_FILE"
}
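
# Example $FEED_URLS_FILE mapping line, as written by fetch_feeds and read by
# process_feed below (illustrative URL and nick):
#
#   https___example_com_twtxt_txt.txt|https://example.com/twtxt.txt|alice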

# Process a single feed and append formatted twts to the combined feed
#
# Reads a single feed, extracts the nick and URL from the metadata or the
# mapping file, formats each twt, and appends it to the combined feed.
#
# $1 - path to the feed file
# $2 - path to the output file
# $3 - if set to "local", this is your own feed
process_feed() {
    local feed_file output_file local_feed feed_filename feed_url feed_nick mapping_line feed_domain unix_timestamp hash display_line
    feed_file="$1"
    output_file="$2"
    local_feed="$3" # If set to "local", this is your own feed
    feed_filename=$(basename "$feed_file")

    if [ "$local_feed" == "local" ]; then
        # For your own feed, get the URL and nick from the metadata
        feed_url=$(get_feed_url "$feed_file")
        feed_nick=$(get_feed_nick "$feed_file")
    else
        # Get feed_url and feed_nick from the mapping file
        mapping_line=$(grep "^${feed_filename}|" "$FEED_URLS_FILE")
        if [ -n "$mapping_line" ]; then
            feed_url=$(echo "$mapping_line" | cut -d'|' -f2)
            feed_nick=$(echo "$mapping_line" | cut -d'|' -f3)
        else
            # Fall back to the metadata if no mapping is found
            feed_url=$(get_feed_url "$feed_file")
            feed_nick=$(get_feed_nick "$feed_file")
        fi
    fi

    # Ensure feed_url and feed_nick are set
    [ -z "$feed_url" ] && feed_url="unknown"
    [ -z "$feed_nick" ] && feed_nick="unknown"

    feed_domain=$(echo "$feed_url" | awk -F'[/:]' '{print $4}')
    [ -z "$feed_domain" ] && feed_domain="unknown"

    # Read the feed and format each twt
    while IFS=$'\t' read -r timestamp content; do
        # Skip empty lines or lines without a timestamp
        [[ "$timestamp" =~ ^[0-9]{4}-[0-9]{2}-[0-9]{2}T ]] || continue

        # Calculate the UNIX timestamp
        unix_timestamp=$(timestamp_to_unix "$timestamp")
        if [ -z "$unix_timestamp" ]; then
            # Skip if unable to parse the timestamp
            continue
        fi

        # Calculate the hash of the twt
        hash=$(calculate_hash "$feed_url" "$timestamp" "$content")

        # Format the display line
        display_line="$(format_twt "$feed_nick" "$feed_domain" "$timestamp" "$hash" "$content")"

        # Prepend the UNIX timestamp for sorting
        echo -e "${unix_timestamp}\t${display_line}" >>"$output_file"
    done <"$feed_file"
}

# Format a twt for display with ANSI colors
#
# $1 - nick
# $2 - domain
# $3 - timestamp
# $4 - hash
# $5 - content
#
# Returns a formatted line with ANSI colors.
format_twt() {
    local nick="$1"
    local domain="$2"
    local timestamp="$3"
    local hash="$4"
    local content="$5"

    # ANSI color codes
    local color_nick="\033[1;34m"      # Bold Blue
    local color_domain="\033[0;34m"    # Blue
    local color_timestamp="\033[0;32m" # Green
    local color_hash="\033[0;33m"      # Yellow
    local color_reset="\033[0m"        # Reset

    # Build the formatted line
    local formatted_line="${color_nick}${nick}${color_reset}@${color_domain}${domain}${color_reset} "
    formatted_line+="[${color_timestamp}${timestamp}${color_reset}] "
    formatted_line+="<${color_hash}${hash}${color_reset}> "
    formatted_line+="${content}"

    echo -e "$formatted_line"
}
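
# Example of a formatted timeline line (illustrative values; ANSI colors
# omitted):
#
#   alice@example.com [2024-01-01T12:00:00Z] <0123456789a> Hello, twtxt!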

# _main() - Main entry point for the twtxt CLI
#
# Checks the first argument to determine the command to execute and
# calls the appropriate function with the remaining arguments.
#
# Available commands:
#   post <content>          - Post a new twt
#   reply <hash> <content>  - Reply to a twt
#   edit <hash> <content>   - Edit a twt
#   delete <hash>           - Delete a twt
#   calc-hash <line-number> - Calculate hash of twt at line number
#   follow <feed-url>       - Follow a new feed
#   unfollow <feed-url>     - Unfollow a feed
#   fetch                   - Fetch latest twts from followed feeds
#   timeline [N]            - Display the latest N twts (default 20)
_main() {
    case "$1" in
    post)
        shift
        if [ -z "$1" ]; then
            echo "Usage: $0 post <content>"
            exit 1
        fi
        post_twt "$*"
        ;;
    reply)
        shift
        if [ -z "$1" ] || [ -z "$2" ]; then
            echo "Usage: $0 reply <hash> <content>"
            exit 1
        fi
        reply_to_hash="$1"
        shift
        reply_twt "$reply_to_hash" "$*"
        ;;
    edit)
        shift
        if [ -z "$1" ] || [ -z "$2" ]; then
            echo "Usage: $0 edit <hash> <content>"
            exit 1
        fi
        edit_hash="$1"
        shift
        edit_twt "$edit_hash" "$*"
        ;;
    delete)
        shift
        if [ -z "$1" ]; then
            echo "Usage: $0 delete <hash>"
            exit 1
        fi
        delete_hash="$1"
        delete_twt "$delete_hash"
        ;;
    calc-hash)
        shift
        if [ -z "$1" ]; then
            echo "Usage: $0 calc-hash <line-number>"
            exit 1
        fi
        line_number="$1"
        hash=$(calculate_twt_hash_by_line "$line_number" "$FEED_FILE")
        echo "Hash of twt at line $line_number: $hash"
        ;;
    follow)
        shift
        if [ -z "$1" ]; then
            echo "Usage: $0 follow <feed-url>"
            exit 1
        fi
        follow_feed "$1"
        ;;
    unfollow)
        shift
        if [ -z "$1" ]; then
            echo "Usage: $0 unfollow <feed-url>"
            exit 1
        fi
        unfollow_feed "$1"
        ;;
    fetch)
        fetch_feeds
        ;;
    timeline)
        shift
        display_timeline "$1"
        ;;
    *)
        echo "Usage: $0 <command> [options]"
        echo "Commands:"
        echo "  post <content>          - Post a new twt"
        echo "  reply <hash> <content>  - Reply to a twt"
        echo "  edit <hash> <content>   - Edit a twt"
        echo "  delete <hash>           - Delete a twt"
        echo "  calc-hash <line-number> - Calculate hash of twt at line number"
        echo "  follow <feed-url>       - Follow a new feed"
        echo "  unfollow <feed-url>     - Unfollow a feed"
        echo "  fetch                   - Fetch latest twts from followed feeds"
        echo "  timeline [N]            - Display the latest N twts (default 20)"
        ;;
    esac
}

if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
    _main "$@"
fi
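
# Typical workflow (illustrative commands; the feed URL is a placeholder):
#
#   ./twtxt.sh post "Hello, twtxt!"
#   ./twtxt.sh follow https://example.com/twtxt.txt
#   ./twtxt.sh fetch
#   ./twtxt.sh timeline 10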