Gary Kwok
2024-06-17 18:05:05 +08:00
commit 56f6d03385
105 changed files with 4350 additions and 0 deletions

View File

@@ -0,0 +1,16 @@
---
import {
  getCollection,
  type CollectionEntry,
  type CollectionKey,
} from "astro:content";

export const getSinglePage = async <C extends CollectionKey>(
  collectionName: C,
): Promise<CollectionEntry<C>[]> => {
  const allPages = await getCollection(collectionName);
  // drop entries whose id starts with "-" (e.g. -index.md)
  const removeIndex = allPages.filter((data) => !data.id.startsWith("-"));
  // drop entries marked as draft in frontmatter
  const removeDrafts = removeIndex.filter((data) => !data.data.draft);
  return removeDrafts;
};
---
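A minimal usage sketch in an Astro page frontmatter, assuming a "posts" collection is defined in src/content/config.ts and each entry has a title field (both are assumptions, not part of this commit):

---
import { getSinglePage } from "@/lib/contentParser.astro";

// hypothetical "posts" collection; drafts and "-"-prefixed entries are already filtered out
const posts = await getSinglePage("posts");
---
<ul>
  {posts.map((post) => <li>{post.data.title}</li>)}
</ul>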

View File

@@ -0,0 +1,33 @@
---
import { getSinglePage } from "@/lib/contentParser.astro";
import { slugify } from "@/lib/utils/textConverter";

// get unique, slugified taxonomy values (e.g. categories, tags) from frontmatter
export const getTaxonomy = async (collection: any, name: string) => {
  const singlePages = await getSinglePage(collection);
  const taxonomyPages = singlePages.map((page: any) => page.data[name]);
  let taxonomies: string[] = [];
  for (let i = 0; i < taxonomyPages.length; i++) {
    const categoryArray = taxonomyPages[i];
    for (let j = 0; j < categoryArray.length; j++) {
      taxonomies.push(slugify(categoryArray[j])!);
    }
  }
  // dedupe while preserving first-seen order
  const taxonomy = [...new Set(taxonomies)];
  return taxonomy;
};

// get all taxonomy values including duplicates (useful for counting posts per term)
export const getAllTaxonomy = async (collection: any, name: string) => {
  const singlePages = await getSinglePage(collection);
  const taxonomyPages = singlePages.map((page: any) => page.data[name]);
  let taxonomies: string[] = [];
  for (let i = 0; i < taxonomyPages.length; i++) {
    const categoryArray = taxonomyPages[i];
    for (let j = 0; j < categoryArray.length; j++) {
      taxonomies.push(slugify(categoryArray[j])!);
    }
  }
  return taxonomies;
};
---
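A minimal usage sketch, assuming these helpers live at @/lib/taxonomyParser.astro and a "blog" collection whose entries carry a categories array (path and collection name are assumptions):

---
import { getTaxonomy, getAllTaxonomy } from "@/lib/taxonomyParser.astro"; // path is an assumption

const categories = await getTaxonomy("blog", "categories"); // unique, slugified terms
const allCategories = await getAllTaxonomy("blog", "categories"); // duplicates kept, e.g. for counting posts per term
---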

View File

@@ -0,0 +1,12 @@
import { format } from "date-fns";

// format a Date or date string with a date-fns pattern (default: "dd MMM, yyyy")
const dateFormat = (
  date: Date | string,
  pattern: string = "dd MMM, yyyy",
): string => {
  const dateObj = new Date(date);
  const output = format(dateObj, pattern);
  return output;
};

export default dateFormat;
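A quick usage sketch, assuming the helper above lives at @/lib/utils/dateFormat:

import dateFormat from "@/lib/utils/dateFormat"; // path is an assumption

dateFormat(new Date(2024, 5, 17)); // "17 Jun, 2024"
dateFormat(new Date(2024, 5, 17), "yyyy-MM-dd"); // "2024-06-17"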

View File

@@ -0,0 +1,40 @@
// estimate reading time from HTML content (275 words per minute, plus an image allowance)
const readingTime = (content: string) => {
  const WPS = 275 / 60; // words per second

  let images = 0;
  const regex = /\w/;

  // count words, tallying <img> tags along the way
  let words = content.split(" ").filter((word) => {
    if (word.includes("<img")) {
      images += 1;
    }
    return regex.test(word);
  }).length;

  // discount 4 counted "words" of markup per image, then credit viewing time:
  // 12s for the first image, decreasing by 1s per image down to a 3s floor
  let imageAdjust = images * 4;
  let imageSecs = 0;
  let imageFactor = 12;
  while (images) {
    imageSecs += imageFactor;
    if (imageFactor > 3) {
      imageFactor -= 1;
    }
    images -= 1;
  }

  const minutes = Math.ceil(((words - imageAdjust) / WPS + imageSecs) / 60);
  if (minutes < 10) {
    if (minutes < 2) {
      return "0" + minutes + " Min read";
    } else {
      return "0" + minutes + " Mins read";
    }
  } else {
    return minutes + " Mins read";
  }
};

export default readingTime;
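A quick usage sketch, assuming the helper above lives at @/lib/utils/readingTime:

import readingTime from "@/lib/utils/readingTime"; // path is an assumption

const html = "<p>" + "word ".repeat(500) + "</p>"; // roughly 500 words, no images
readingTime(html); // "02 Mins read"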

View File

@@ -0,0 +1,36 @@
// similar products
const similerItems = (currentItem: any, allItems: any[], slug: string) => {
  let categories: string[] = [];
  let tags: string[] = [];

  // set categories
  if (currentItem.data.categories.length > 0) {
    categories = currentItem.data.categories;
  }

  // set tags
  if (currentItem.data.tags.length > 0) {
    tags = currentItem.data.tags;
  }

  // items sharing at least one category
  const filterByCategories = allItems.filter(
    (item: { data: { categories: string[] } }) =>
      categories.find((category) => item.data.categories.includes(category)),
  );

  // items sharing at least one tag
  const filterByTags = allItems.filter((item: { data: { tags: string[] } }) =>
    tags.find((tag) => item.data.tags.includes(tag)),
  );

  // merge both lists, dropping duplicate entries
  const mergedItems = [...new Set([...filterByCategories, ...filterByTags])];

  // exclude the current item itself
  const filterBySlug = mergedItems.filter((product) => product.slug !== slug);

  return filterBySlug;
};

export default similerItems;
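A usage sketch for a "related posts" block, assuming the module lives at @/lib/utils/similarItems and a "blog" collection whose entries have categories and tags arrays (all assumptions):

---
import { getSinglePage } from "@/lib/contentParser.astro";
import similerItems from "@/lib/utils/similarItems"; // path is an assumption

const posts = await getSinglePage("blog"); // hypothetical collection
const related = similerItems(posts[0], posts, posts[0].slug).slice(0, 3);
---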

View File

@@ -0,0 +1,25 @@
// sort by date (newest first)
export const sortByDate = (array: any[]) => {
  const sortedArray = array.sort(
    (a: any, b: any) =>
      new Date(b.data.date).getTime() - new Date(a.data.date).getTime(),
  );
  return sortedArray;
};

// sort by weight: weighted items first (ascending weight), unweighted items last
export const sortByWeight = (array: any[]) => {
  const withWeight = array.filter(
    (item: { data: { weight: any } }) => item.data.weight,
  );
  const withoutWeight = array.filter(
    (item: { data: { weight: any } }) => !item.data.weight,
  );
  const sortedWeightedArray = withWeight.sort(
    (a: { data: { weight: number } }, b: { data: { weight: number } }) =>
      a.data.weight - b.data.weight,
  );
  const sortedArray = [...new Set([...sortedWeightedArray, ...withoutWeight])];
  return sortedArray;
};
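A quick usage sketch, assuming the functions above live at @/lib/utils/sortFunctions:

---
import { getSinglePage } from "@/lib/contentParser.astro";
import { sortByDate, sortByWeight } from "@/lib/utils/sortFunctions"; // path is an assumption

const posts = sortByDate(await getSinglePage("blog")); // hypothetical collection, newest first
const pages = sortByWeight(await getSinglePage("pages")); // weighted entries first, ascending
---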

View File

@@ -0,0 +1,8 @@
import { slugify } from "@/lib/utils/textConverter";

// keep only posts whose `name` field (e.g. categories, tags) contains the slugified key
const taxonomyFilter = (posts: any[], name: string, key: any) =>
  posts.filter((post) =>
    post.data[name].map((value: string) => slugify(value)).includes(key),
  );

export default taxonomyFilter;
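A quick usage sketch, assuming the filter above lives at @/lib/utils/taxonomyFilter:

---
import { getSinglePage } from "@/lib/contentParser.astro";
import taxonomyFilter from "@/lib/utils/taxonomyFilter"; // path is an assumption

const posts = await getSinglePage("blog"); // hypothetical collection
const astroPosts = taxonomyFilter(posts, "tags", "astro"); // posts whose tags slugify to "astro"
---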

View File

@@ -0,0 +1,57 @@
import { slug } from "github-slugger";
import { marked } from "marked";

// slugify
export const slugify = (content: string) => {
  if (!content) return null;
  return slug(content);
};

// markdownify
export const markdownify = (content: string) => {
  if (!content) return null;
  return marked.parseInline(content);
};

// humanize: trim underscores/whitespace, collapse them to single spaces, capitalize the first letter
export const humanize = (content: string) => {
  if (!content) return null;
  return content
    .replace(/^[\s_]+|[\s_]+$/g, "")
    .replace(/[_\s]+/g, " ")
    .replace(/^[a-z]/, function (m) {
      return m.toUpperCase();
    });
};

// plainify: strip HTML tags, collapse blank lines, and decode common entities
export const plainify = (content: string) => {
  if (!content) return null;
  const filterBrackets = content.replace(/<\/?[^>]+(>|$)/gm, "");
  const filterSpaces = filterBrackets.replace(/[\r\n]\s*[\r\n]/gm, "");
  const stripHTML = htmlEntityDecoder(filterSpaces);
  return stripHTML;
};

// decode the entities listed in entityList (used by plainify)
const htmlEntityDecoder = (htmlWithEntities: string): string => {
  let entityList: { [key: string]: string } = {
    "&nbsp;": " ",
    "&lt;": "<",
    "&gt;": ">",
    "&amp;": "&",
    "&quot;": '"',
    "&#39;": "'",
  };
  let htmlWithoutEntities: string = htmlWithEntities.replace(
    /(&nbsp;|&amp;|&lt;|&gt;|&quot;|&#39;)/g,
    (entity: string): string => {
      return entityList[entity];
    },
  );
  return htmlWithoutEntities;
};
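
A quick usage sketch:

import { slugify, humanize, plainify } from "@/lib/utils/textConverter";

slugify("Hello World!"); // "hello-world"
humanize("getting_started guide"); // "Getting started guide"
plainify("<p>5 &lt; 10 &amp; 7 &gt; 3</p>"); // "5 < 10 & 7 > 3"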