// My-Portfolio / backend / routes / activity.js
const express = require("express");
const router = express.Router();
const fetch = require("node-fetch");
const xml2js = require("xml2js");
const pool = require("../db");
const NodeCache = require("node-cache");
require("dotenv").config();

// Initialize cache with a 1-hour TTL (time-to-live)
const cache = new NodeCache({ stdTTL: 3600 });

// Endpoints loaded from environment variables (.env):
//   BASE_URL                — prefix used to build absolute logo URLs
//   GIT_API_URL_V2          — GitBucket REST API root (commit lookups)
//   REPOSITORY_BASE_URL_V2  — GitBucket web root (Atom feed, commit pages)
const BASE_URL = process.env.BASE_URL;
const GIT_API_URL_V2 = process.env.GIT_API_URL_V2;
const REPOSITORY_BASE_URL_V2 = process.env.REPOSITORY_BASE_URL_V2;

// Fetch and assemble activity data from the GitBucket Atom feed.
//
// Steps:
//   1. Look up the GitBucket logo URL from the `tools` table.
//   2. Download and parse the repository activity feed (Atom XML).
//   3. For each of the 10 most recent entries, fetch that repository's
//      recent commits and pick the one whose author date is closest to the
//      activity timestamp, producing a news item linking to that commit.
//
// Returns a promise resolving to an array of news-item objects; entries
// whose commit lookup fails degrade to { title, author, date, url: "#" }.
const fetchActivityData = async () => {
  try {
    // Fetch the GitBucket logo from the database.
    // NOTE(review): assumes the driver returns rows directly (logoResult[0]);
    // confirm against the pool implementation in ../db.
    const logoQuery = "SELECT logo FROM tools WHERE name = 'GitBucket'";
    const logoResult = await pool.query(logoQuery);
    const logoUrl = logoResult[0]?.logo || '';
    const fullLogoUrl = logoUrl ? `${BASE_URL}${logoUrl}` : '';

    // Fetch activity data from GitBucket API.
    const activityUrl = `${REPOSITORY_BASE_URL_V2}/activities.atom`;
    const response = await fetch(activityUrl);
    if (!response.ok) {
      throw new Error(`Activity feed request failed with status ${response.status}`);
    }
    const xmlData = await response.text();

    const parser = new xml2js.Parser({ explicitArray: false });
    return new Promise((resolve, reject) => {
      parser.parseString(xmlData, async (err, result) => {
        if (err) {
          console.error("Error parsing XML:", err);
          // `return` is required here: without it execution would fall
          // through and crash on `result.feed` after rejecting.
          return reject(new Error("Failed to parse XML"));
        }

        // `entry` may be absent, a single object, or an array depending on
        // feed size; normalize to an array and keep at most 10 entries.
        const rawEntries = result?.feed?.entry;
        const entries = (Array.isArray(rawEntries)
          ? rawEntries
          : rawEntries
            ? [rawEntries]
            : []
        ).slice(0, 10);

        try {
          // Build one news item per entry; commit lookups run in parallel.
          const newsItems = await Promise.all(
            entries.map(async (entry) => {
              const title = entry.title || "No Title";
              const author = entry.author?.name || "Unknown";
              const date = entry.published || "No Date";

              // Extract "owner/repo" from the entry's link href.
              const repoMatch = entry.link?.$.href.match(/([^/]+)\/([^/]+)$/);
              if (!repoMatch) return { title, author, date, url: "#" };

              const [, repoOwner, repoName] = repoMatch;
              const gitRepository = `${repoOwner}/${repoName}`;

              try {
                // Fetch the latest commits for this repository (up to 10).
                const commitResponse = await fetch(
                  `${GIT_API_URL_V2}/${gitRepository}/commits?per_page=10`
                );
                const commits = await commitResponse.json();

                if (!Array.isArray(commits) || commits.length === 0) {
                  return { title, author, date, url: "#" };
                }

                // The feed does not reference commits directly, so pick the
                // commit whose author date is closest to the activity time.
                const activityTimestamp = new Date(date).getTime();
                const closestCommit = commits.reduce((closest, commit) => {
                  const commitTimestamp = new Date(commit.commit.author.date).getTime();
                  return Math.abs(commitTimestamp - activityTimestamp) <
                    Math.abs(new Date(closest.commit.author.date).getTime() - activityTimestamp)
                    ? commit
                    : closest;
                }, commits[0]);

                return {
                  name: `${closestCommit.commit.message} at ${repoName}`,
                  author,
                  date,
                  message: closestCommit.commit.message,
                  url: `${REPOSITORY_BASE_URL_V2}/${gitRepository}/commit/${closestCommit.sha}`,
                  logoUrl: fullLogoUrl,
                };
              } catch (commitErr) {
                // Best-effort: a failed commit lookup degrades to a plain item.
                console.error(
                  `Error fetching commits for ${gitRepository}:`,
                  commitErr.message
                );
                return { title, author, date, url: "#" };
              }
            })
          );

          resolve(newsItems);
        } catch (mapErr) {
          // Previously a rejection inside this async callback became an
          // unhandled rejection and the outer promise never settled.
          reject(mapErr);
        }
      });
    });
  } catch (err) {
    console.error("Error fetching activity data:", err.message);
    throw err;
  }
};

// Rebuild the cached activity data on a fixed schedule so requests are
// normally served from cache rather than hitting the GitBucket APIs.
const refreshCachePeriodically = () => {
  const ONE_HOUR_MS = 3600 * 1000;

  setInterval(async () => {
    console.log("Refreshing activity data cache...");
    try {
      cache.set("activityData", await fetchActivityData());
      console.log("Cache refreshed successfully");
    } catch (err) {
      // A failed refresh keeps the previous cache entry in place.
      console.error("Error refreshing cache:", err.message);
    }
  }, ONE_HOUR_MS);
};

// Kick off the hourly background refresh as soon as the module loads.
refreshCachePeriodically();

// GET /activity — serve the activity feed, preferring cached data.
// On a cache miss the data is fetched on demand, stored in the cache, and
// returned; any failure yields a 500 with a plain-text body.
router.get("/activity", async (req, res) => {
  try {
    const cached = cache.get("activityData");
    if (cached) {
      console.log("Cache hit: Serving cached data");
      return res.json(cached);
    }

    console.log("Cache miss: Fetching fresh data from GitBucket API");
    const activityData = await fetchActivityData();
    cache.set("activityData", activityData);
    res.json(activityData);
  } catch (err) {
    console.error("Error fetching activity data:", err.message);
    res.status(500).send("Server error");
  }
});

module.exports = router;