Merge pull request #1374 from DIYgod/master
[pull] master from diygod:master
pull[bot] authored May 4, 2024
2 parents e376a51 + 7f7908a commit 8c93911
Showing 14 changed files with 1,146 additions and 840 deletions.
24 changes: 24 additions & 0 deletions .github/workflows/test.yml
@@ -15,6 +15,30 @@ permissions:
  contents: read

jobs:
  fix-pnpm-lock:
    # workaround for https://github.com/dependabot/dependabot-core/issues/7258
    # until https://github.com/pnpm/pnpm/issues/6530 is fixed
    if: github.triggering_actor == 'dependabot[bot]' && github.event_name == 'pull_request'
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
      contents: write
    steps:
      - uses: actions/checkout@v4
      - uses: pnpm/action-setup@v3
        with:
          version: 8
      - uses: actions/setup-node@v4
        with:
          node-version: lts/*
          cache: 'pnpm'
      - run: |
          rm pnpm-lock.yaml
          pnpm i
      - uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: 'chore: fix pnpm install'

  vitest:
    runs-on: ubuntu-latest
    timeout-minutes: 10
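The new fix-pnpm-lock job is gated to Dependabot-authored pull requests. Because Dependabot cannot yet regenerate pnpm lockfiles itself (the two issues linked in the comments), the job deletes pnpm-lock.yaml, runs pnpm i to rebuild it against the bumped manifest, and pushes the fresh lockfile back to the pull request branch via git-auto-commit-action.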
96 changes: 96 additions & 0 deletions lib/routes/69shu/article.ts
@@ -0,0 +1,96 @@
import { load } from 'cheerio';
import cache from '@/utils/cache';
import ofetch from '@/utils/ofetch';
import type { Route, DataItem } from '@/types';

export const route: Route = {
    path: '/article/:id',
    name: '章节',
    url: 'www.69shu.top',
    maintainers: ['eternasuno'],
    example: '/article/47117',
    parameters: { id: '小说 id, 可在对应小说页 URL 中找到' },
    categories: ['reading'],
    features: {
        requireConfig: false,
        requirePuppeteer: false,
        antiCrawler: false,
        supportBT: false,
        supportPodcast: false,
        supportScihub: false,
    },
    radar: [
        {
            source: ['www.69shu.top/book/:id.htm'],
            target: '/article/:id',
        },
    ],
    handler: async (ctx) => {
        const { id } = ctx.req.param();
        const link = `https://www.69shu.top/book/${id}.htm`;
        const $ = load(await get(link));

        const item = await Promise.all(
            $('.qustime li>a')
                .map((_, chapter) => createItem(chapter.attribs.href))
                .toArray()
        );

        return {
            title: $('h1>a').text(),
            description: $('.navtxt>p:first-of-type').text(),
            link,
            item,
            image: $('.bookimg2>img').attr('src'),
            author: $('.booknav2>p:first-of-type>a').text(),
            language: 'zh-cn',
        };
    },
};

const createItem = (url: string) =>
    cache.tryGet(url, async () => {
        const $ = load(await get(url));
        const { articleid, chapterid, chaptername } = parseObject(/bookinfo\s?=\s?{[\S\s]+?}/, $('head>script:not([src])').text());
        const decryptionMap = parseObject(/_\d+\s?=\s?{[\S\s]+?}/, $('.txtnav+script').text());

        return {
            title: chaptername,
            description: decrypt($('.txtnav').html() || '', articleid, chapterid, decryptionMap),
            link: url,
        };
    }) as Promise<DataItem>;

const get = async (url: string, encoding = 'gbk') => new TextDecoder(encoding).decode(await ofetch(url, { responseType: 'arrayBuffer' }));

const parseObject = (reg: RegExp, str: string): Record<string, string> => {
    const obj = {};
    const match = reg.exec(str);
    if (match) {
        for (const line of match[0].matchAll(/(\w+):\s?["']?([\S\s]+?)["']?[\n,}]/g)) {
            obj[line[1]] = line[2];
        }
    }

    return obj;
};

const decrypt = (txt: string, articleid: string, chapterid: string, decryptionMap: Record<string, string>) => {
    if (!txt || txt.length < 10) {
        return txt;
    }

    const lineMap = {};
    const articleKey = Number(articleid) + 3_061_711;
    const chapterKey = Number(chapterid) + 3_421_001;
    for (const key of Object.keys(decryptionMap)) {
        lineMap[(Number(key) ^ chapterKey) - articleKey] = (Number(decryptionMap[key]) ^ chapterKey) - articleKey;
    }

    return txt
        .split('<br><br>')
        .map((line, index, array) => (lineMap[index] ? array[lineMap[index]] : line))
        .slice(1, -1)
        .join('<br>')
        .replaceAll(/\u2003|(<div[\S\s]*?<\/div>)/g, '');
};
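A note on the decryption above: each chapter page embeds a bookinfo object and an obfuscated index map in inline scripts; parseObject extracts both, and decrypt inverts the map (each key and value is XORed with a chapter-derived constant and shifted by an article-derived one) to swap the shuffled <br><br>-separated paragraphs back into reading order. A minimal sketch of the index math, with synthetic ids (illustrative values only, not taken from the site):

// Synthetic ids for illustration only; real values come from the page's bookinfo object.
const articleid = '47117';
const chapterid = '321';
const articleKey = Number(articleid) + 3_061_711;
const chapterKey = Number(chapterid) + 3_421_001;

// Obfuscate "displayed paragraph 2 actually holds source paragraph 5" the way the site does:
const key = (2 + articleKey) ^ chapterKey;
const value = (5 + articleKey) ^ chapterKey;

// decrypt() recovers the pair, since XOR is its own inverse:
console.log((key ^ chapterKey) - articleKey); // 2
console.log((value ^ chapterKey) - articleKey); // 5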
6 changes: 6 additions & 0 deletions lib/routes/69shu/namespace.ts
@@ -0,0 +1,6 @@
import type { Namespace } from '@/types';

export const namespace: Namespace = {
    name: '69书吧',
    url: '69shu.top',
};
41 changes: 41 additions & 0 deletions lib/routes/apnews/rss.ts
@@ -0,0 +1,41 @@
import { Route } from '@/types';
import parser from '@/utils/rss-parser';
import { fetchArticle } from './utils';
const HOME_PAGE = 'https://apnews.com';

export const route: Route = {
    path: '/rss/:rss?',
    categories: ['traditional-media'],
    example: '/apnews/rss/business',
    parameters: { rss: 'Route name from the first segment of the corresponding site, or `index` for the front page (default).' },
    features: {
        requireConfig: false,
        requirePuppeteer: false,
        antiCrawler: false,
        supportBT: false,
        supportPodcast: false,
        supportScihub: false,
    },
    radar: [
        {
            source: ['apnews.com/:rss'],
            target: '/rss/:rss',
        },
    ],
    name: 'RSS',
    maintainers: ['zoenglinghou', 'mjysci', 'TonyRL'],
    handler,
};

async function handler(ctx) {
    const { rss = 'index' } = ctx.req.param();
    const url = `${HOME_PAGE}/${rss}.rss`;
    const res = await parser.parseURL(url);

    const items = await Promise.all(res.items.map((item) => fetchArticle(item)));

    return {
        ...res,
        item: items,
    };
}
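For example, /apnews/rss/business pulls https://apnews.com/business.rss and expands each entry to full text via fetchArticle from utils.ts below; with no parameter, the route falls back to index.rss for the front page.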
18 changes: 2 additions & 16 deletions lib/routes/apnews/topics.ts
@@ -1,8 +1,7 @@
 import { Route } from '@/types';
-import cache from '@/utils/cache';
 import got from '@/utils/got';
 import { load } from 'cheerio';
-import timezone from '@/utils/timezone';
+import { fetchArticle } from './utils';
 const HOME_PAGE = 'https://apnews.com';

 export const route: Route = {

@@ -44,20 +43,7 @@ async function handler(ctx) {
                 link: $(e).find('a').attr('href'),
             }))
             .filter((e) => typeof e.link === 'string')
-            .map((item) =>
-                cache.tryGet(item.link, async () => {
-                    const { data: response } = await got(item.link);
-                    const $ = load(response);
-                    $('div.Enhancement').remove();
-                    return Object.assign(item, {
-                        pubDate: timezone(new Date($("meta[property='article:published_time']").attr('content')), 0),
-                        updated: timezone(new Date($("meta[property='article:modified_time']").attr('content')), 0),
-                        description: $('div.RichTextStoryBody').html(),
-                        category: $("meta[property='article:section']").attr('content'),
-                        guid: $("meta[name='brightspot.contentId']").attr('content'),
-                    });
-                })
-            )
+            .map((item) => fetchArticle(item))
     );

     return {
20 changes: 20 additions & 0 deletions lib/routes/apnews/utils.ts
@@ -0,0 +1,20 @@
import cache from '@/utils/cache';
import ofetch from '@/utils/ofetch';
import { parseDate } from '@/utils/parse-date';
import timezone from '@/utils/timezone';
import { load } from 'cheerio';

export function fetchArticle(item) {
    return cache.tryGet(item.link, async () => {
        const data = await ofetch(item.link);
        const $ = load(data);
        $('div.Enhancement').remove();
        return Object.assign(item, {
            pubDate: timezone(parseDate($("meta[property='article:published_time']").attr('content')), 0),
            updated: timezone(parseDate($("meta[property='article:modified_time']").attr('content')), 0),
            description: $('div.RichTextStoryBody').html(),
            category: $("meta[property='article:section']").attr('content'),
            guid: $("meta[name='brightspot.contentId']").attr('content'),
        });
    });
}
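fetchArticle is now shared by rss.ts and topics.ts: it wraps each per-article request in cache.tryGet keyed on the article URL, so repeat fetches of the same story are served from cache. A hypothetical call (the link is illustrative only):

const item = { title: 'Example', link: 'https://apnews.com/article/some-slug' };
const enriched = await fetchArticle(item); // adds pubDate, updated, description, category, guid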
10 changes: 5 additions & 5 deletions lib/routes/github/pulls.ts
@@ -1,5 +1,5 @@
 import { Route } from '@/types';
-import got from '@/utils/got';
+import ofetch from '@/utils/ofetch';
 import { config } from '@/config';
 import MarkdownIt from 'markdown-it';
 const md = MarkdownIt({

@@ -28,7 +28,7 @@ export const route: Route = {
         },
     ],
     name: 'Repo Pull Requests',
-    maintainers: [],
+    maintainers: ['hashman', 'TonyRL'],
     handler,
 };

@@ -45,8 +45,8 @@ async function handler(ctx) {
     if (config.github && config.github.access_token) {
         headers.Authorization = `token ${config.github.access_token}`;
     }
-    const response = await got(url, {
-        searchParams: {
+    const response = await ofetch(url, {
+        query: {
             state,
             labels,
             sort: 'created',

@@ -55,7 +55,7 @@
         },
         headers,
     });
-    const data = response.data.filter((item) => item.pull_request);
+    const data = response.filter((item) => item.pull_request);

     return {
         allowEmpty: true,
6 changes: 6 additions & 0 deletions lib/routes/gq/namespace.ts
@@ -0,0 +1,6 @@
import type { Namespace } from '@/types';

export const namespace: Namespace = {
    name: 'GQ',
    url: 'gq.com',
};
59 changes: 59 additions & 0 deletions lib/routes/gq/news.ts
@@ -0,0 +1,59 @@
import { Route } from '@/types';
import cache from '@/utils/cache';
import parser from '@/utils/rss-parser';
import { load } from 'cheerio';
import { ofetch } from 'ofetch';
const host = 'https://www.gq.com';
export const route: Route = {
    path: '/news',
    categories: ['traditional-media'],
    example: '/gq/news',
    parameters: {},
    features: {
        requireConfig: false,
        requirePuppeteer: false,
        antiCrawler: false,
        supportBT: false,
        supportPodcast: false,
        supportScihub: false,
    },
    radar: [
        {
            source: ['gq.com/'],
        },
    ],
    name: 'News',
    maintainers: ['EthanWng97'],
    handler,
};

async function handler() {
    const rssUrl = `${host}/feed/rss`;
    const feed = await parser.parseURL(rssUrl);
    const items = await Promise.all(
        feed.items.map((item) =>
            cache.tryGet(item.link, async () => {
                const data = await ofetch(item.link);
                const $ = load(data);
                const description = $('#main-content');
                description.find('.article-body__footer').remove();
                description.find('[class*="ContentHeaderContributorImage"]').remove();
                description.find('h1').remove();
                return {
                    title: item.title,
                    pubDate: item.pubDate,
                    link: item.link,
                    category: item.categories,
                    description: description.html(),
                };
            })
        )
    );

    return {
        title: 'GQ',
        link: host,
        description: `GQ is the global flagship of men's fashion, the arbiter of cool for anyone who sees the world through the lens of taste and style.`,
        item: items,
    };
}