feat: rename includes and data directories

This commit is contained in:
Devin Haska 2025-03-16 11:43:16 -07:00
parent 864bc7be71
commit 408e859ef4
41 changed files with 7 additions and 9 deletions

90
src/data/blogroll.js Normal file
View file

@@ -0,0 +1,90 @@
/**
 * Blogroll: personal sites the author follows.
 * Exported sorted alphabetically by title for display on /blogroll.
 */
const blogroll = [
  {
    title: "Rach Smith's Digital Garden",
    url: "https://rachsmith.com/",
  },
  {
    title: "Robb Knight",
    url: "https://rknight.me/",
  },
  {
    title: "Stephanie Stimac",
    url: "https://stephaniestimac.com/",
  },
  {
    title: "Lene Saile",
    url: "https://www.lenesaile.com/en/",
  },
  {
    title: "Chris Nicholas",
    url: "https://chrisnicholas.dev/",
  },
  {
    title: "Katherine Yang",
    url: "https://kayserifserif.place/",
  },
  {
    title: "Alice Bartlett",
    url: "https://alicebartlett.co.uk/",
  },
  {
    title: "Justin Duke",
    url: "https://jmduke.com/",
  },
  {
    title: "Jonas Downey",
    url: "https://jonas.do/",
  },
  {
    title: "Robin Rendle",
    url: "https://robinrendle.com/",
  },
  {
    title: "Robb Owen",
    url: "https://robbowen.digital/",
  },
  {
    title: "CSS in Real Life",
    url: "https://css-irl.info/",
  },
  {
    title: "Ryan Mulligan",
    url: "https://ryanmulligan.dev/",
  },
  {
    title: "Ryan P. Randall",
    url: "https://www.ryanpatrickrandall.com/",
  },
  {
    title: "Aleksandr Hovhannisyan",
    url: "https://www.aleksandrhovhannisyan.com/",
  },
  {
    // Fix: cassidoo.co is Cassidy Williams's site (was "Casey Williams").
    title: "Cassidy Williams",
    url: "https://cassidoo.co/",
  },
  {
    title: "Chris Coyier",
    url: "https://chriscoyier.net/",
  },
  {
    title: "Winnie Lim",
    url: "https://winnielim.org/",
  },
  {
    title: "Cory Dransfeldt",
    url: "https://coryd.dev/",
  },
  {
    title: "HeydonWorks",
    url: "https://heydonworks.com",
  },
  {
    title: "Anh",
    url: "https://anhvn.com",
  },
];

// Sort a copy — Array.prototype.sort mutates in place, which would silently
// reorder `blogroll` itself. localeCompare gives locale-aware ordering.
const sortedBlogroll = [...blogroll].sort((a, b) =>
  a.title.localeCompare(b.title),
);

export default sortedBlogroll;

68
src/data/letterboxd.js Normal file
View file

@@ -0,0 +1,68 @@
import "dotenv/config";
import EleventyFetch from "@11ty/eleventy-fetch";
import cheerio from "cheerio";
import dayjs from "dayjs";
import utc from "dayjs/plugin/utc.js";
import relativeTime from "dayjs/plugin/relativeTime.js";
// Register dayjs plugins: UTC parsing and humanized relative times.
// NOTE(review): neither plugin is used by code visible in this module —
// presumably templates consuming this data rely on the extended dayjs
// instance; confirm before removing.
dayjs.extend(utc);
dayjs.extend(relativeTime);
/**
 * Download the raw Letterboxd RSS feed for the `wonderfulfrog` account.
 *
 * @param {string} duration - EleventyFetch cache lifetime (e.g. "1d").
 * @returns {Promise<string|undefined>} Feed XML as text, or undefined when
 *   the request fails (errors are logged, never thrown).
 */
const fetchLetterboxd = async (duration) => {
  const url = `https://letterboxd.com/wonderfulfrog/rss/`;
  try {
    return await EleventyFetch(url, { duration, type: "text" });
  } catch (e) {
    console.error("Error fetching data from Letterboxd", e);
    return undefined;
  }
};
/**
 * Scrape the five most recent film entries from the Letterboxd RSS feed.
 *
 * @returns {Promise<Array<Object>>} Movies with { imgSrc, isRewatch, rating,
 *   title, url, watchedDate, year }; empty array when the feed is unavailable.
 */
const fetchRecentMovies = async () => {
  // Feed is cached for one day via EleventyFetch.
  const response = await fetchLetterboxd("1d");
  if (!response) {
    return [];
  }
  // Parse in XML mode so namespaced <letterboxd:*> elements survive.
  const $ = cheerio.load(response, { xml: true });
  const recentMovies = [];
  $("channel")
    .children("item")
    .slice(0, 5)
    .each((_, element) => {
      const title = $(element).children("letterboxd\\:filmTitle").text();
      if (!title) {
        // NOTE(review): returning false stops cheerio's .each() entirely, so
        // any later items are skipped too. If the intent was to skip only the
        // title-less item (e.g. a list entry), this should be a bare `return;`
        // — confirm against real feed data.
        return false;
      }
      const watchedDate = $(element)
        .children("letterboxd\\:watchedDate")
        .text();
      const year = $(element).children("letterboxd\\:filmYear").text();
      // <description> carries an HTML fragment; re-parse it for the poster.
      const img = $(element).children("description").text();
      const imgSrc = $(img).children("img").attr("src");
      const url = $(element).children("link").text();
      const rewatch = $(element).children("letterboxd\\:rewatch").text();
      const rating = $(element).children("letterboxd\\:memberRating").text();
      recentMovies.push({
        imgSrc,
        isRewatch: rewatch === "Yes",
        rating,
        title,
        url,
        watchedDate,
        year,
      });
    });
  return recentMovies;
};
export default fetchRecentMovies;

23
src/data/meta.js Normal file
View file

@@ -0,0 +1,23 @@
/**
 * Global site metadata (Eleventy global data): identity, URLs, and
 * contact/social profiles consumed by templates.
 */
export default {
  // Production URL comes from the environment; falls back to the local
  // Eleventy dev-server origin.
  url: process.env.URL || "http://localhost:8080",
  siteName: "wonderfulfrog",
  siteDescription:
    "My name is Devin Haska and this is my little slice of the internet I call home.",
  // NOTE(review): "en_EN" is not a standard locale code ("EN" is not a
  // territory); og:locale consumers usually expect e.g. "en_US" or "en_CA" —
  // confirm intent before changing.
  locale: "en_EN",
  lang: "en",
  author: "Devin Haska",
  repoUrl: "https://github.com/wonderfulfrog/wonderfulfrog.com",
  email: "devin@wonderfulfrog.com",
  // Contact / social profiles.
  signal: {
    name: "wonderfulfrog.64",
    url: "https://signal.me/#eu/1Gqnr33MvuJX6QvcvgEjgaFBO7nMW5sdEyFsqHrj0ajEx3HglsVC7ZiesDrO_bXQ",
  },
  mastodon: {
    name: "@wonderfulfrog@mastodon.social",
    url: "https://mastodon.social/@wonderfulfrog",
  },
  github: {
    url: "https://github.com/wonderfulfrog",
    name: "wonderfulfrog",
  },
};

48
src/data/navigation.js Normal file
View file

@@ -0,0 +1,48 @@
/**
 * Site navigation data: `top` drives the primary (header) links — each with
 * an icon name — and `bottom` drives the secondary (footer) links.
 */
export default {
  top: [
    {
      text: "About",
      url: "/about",
      icon: "circle-info",
    },
    {
      text: "Posts",
      url: "/posts",
      icon: "list",
    },
    {
      text: "Games",
      url: "/games",
      icon: "game-controller",
    },
    {
      text: "Watching",
      url: "/watching",
      icon: "tv-retro",
    },
    {
      text: "Books",
      url: "/books",
      icon: "book",
    },
  ],
  // Footer links have no icons.
  bottom: [
    { text: "Blogroll", url: "/blogroll" },
    {
      text: "Colophon",
      url: "/colophon",
    },
    {
      text: "Contact",
      url: "/contact",
    },
    {
      text: "/uses",
      url: "/uses",
    },
    {
      text: "/now",
      url: "/now",
    },
  ],
};

22
src/data/preloads.js Normal file
View file

@@ -0,0 +1,22 @@
import { getFontUrl } from "../utils/fonts.js";
import fonts from "../../config/design-tokens/fonts.js";
/**
 * <link rel="preload"> descriptors for the self-hosted web fonts, in
 * priority order. Each entry becomes `as`/`href`/`crossorigin` attributes.
 */

// Font weights worth preloading (display faces first, then body regular).
const weightsToPreload = [
  fonts.display.weights.ExtraBold,
  fonts.display.weights.Bold,
  fonts.body.weights.Regular,
];

const preloads = weightsToPreload.map((weight) => ({
  as: "font",
  href: getFontUrl(weight.path),
  crossorigin: true,
}));

export default preloads;

112
src/data/robots.js Normal file
View file

@@ -0,0 +1,112 @@
import "dotenv/config";
import EleventyFetch from "@11ty/eleventy-fetch";
// API token for the Dark Visitors robots.txt generator; may be undefined in
// environments where the secret is not configured.
const accessToken = process.env.DARK_VISITORS_ACCESS_TOKEN;

// Hand-maintained fallback ruleset served when the Dark Visitors API is
// unreachable: blocks known AI scrapers/agents site-wide.
const STATIC = `# AI Data Scraper
# https://darkvisitors.com/agents/bytespider
User-agent: Bytespider
Disallow: /
# AI Data Scraper
# https://darkvisitors.com/agents/ccbot
User-agent: CCBot
Disallow: /
# AI Data Scraper
# https://darkvisitors.com/agents/claudebot
User-agent: ClaudeBot
Disallow: /
# AI Data Scraper
# https://darkvisitors.com/agents/diffbot
User-agent: Diffbot
Disallow: /
# AI Data Scraper
# https://darkvisitors.com/agents/facebookbot
User-agent: FacebookBot
Disallow: /
# AI Data Scraper
# https://darkvisitors.com/agents/google-extended
User-agent: Google-Extended
Disallow: /
# AI Data Scraper
# https://darkvisitors.com/agents/gptbot
User-agent: GPTBot
Disallow: /
# AI Data Scraper
# https://darkvisitors.com/agents/omgili
User-agent: omgili
Disallow: /
# Undocumented AI Agent
# https://darkvisitors.com/agents/anthropic-ai
User-agent: anthropic-ai
Disallow: /
# Undocumented AI Agent
# https://darkvisitors.com/agents/claude-web
User-agent: Claude-Web
Disallow: /
# Undocumented AI Agent
# https://darkvisitors.com/agents/cohere-ai
User-agent: cohere-ai
Disallow: /
`;
/**
 * Fetch a generated robots.txt from the Dark Visitors API (cached for a day
 * via EleventyFetch).
 *
 * @returns {Promise<string|undefined>} robots.txt body, or undefined so the
 *   caller can fall back to the STATIC ruleset.
 */
const fetchRobotsTxt = async () => {
  // Without a token the request can only be rejected; skip the round-trip
  // (and avoid caching a failure) and use the static fallback immediately.
  if (!accessToken) {
    console.warn(
      "DARK_VISITORS_ACCESS_TOKEN is not set; using static robots.txt",
    );
    return undefined;
  }
  const url = "https://api.darkvisitors.com/robots-txts";
  const body = JSON.stringify({
    agent_types: ["AI Assistant", "AI Data Scraper", "AI Search Crawler"],
    disallow: "/",
  });
  try {
    const response = await EleventyFetch(url, {
      duration: "1d", // cache the generated file for a day
      type: "text",
      fetchOptions: {
        method: "POST",
        headers: {
          Authorization: `Bearer ${accessToken}`,
          "Content-Type": "application/json",
        },
        body,
      },
    });
    return response.toString();
  } catch (e) {
    console.error(
      "Error fetching robots.txt from Dark Visitors API, falling back to static version",
      e,
    );
    return undefined;
  }
};
/**
 * Eleventy data entry point: prefer the Dark Visitors API response, falling
 * back to the bundled STATIC ruleset when the fetch yields nothing.
 */
export default async function () {
  const generated = await fetchRobotsTxt();
  return generated || STATIC;
}