mirror of
https://scm.univ-tours.fr/22107988t/rappaurio-sae501_502.git
synced 2025-08-29 23:55:58 +02:00
permet l'ajout des frameworks et des routes
This commit is contained in:
88
app/node_modules/wikijs/dist/cjs/chain.d.ts
generated
vendored
Normal file
88
app/node_modules/wikijs/dist/cjs/chain.d.ts
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
/**
 * Chain API requests together
 * @example
 * // Get page summary and images in same request
 * wiki.page('batman').then(page => page.chain().summary().image().request()).then(console.log);
 * @namespace QueryChain
 */
export default class QueryChain {
    constructor(apiOptions: any, id: any);
    // MediaWiki page id this chain is scoped to (undefined for root chains).
    id: any;
    // Options (apiUrl, headers, ...) forwarded to every API call.
    apiOptions: any;
    // Accumulated query-string parameters for the combined request.
    _params: {
        pageids: any;
    };
    // Set of `prop` values collected by the chained calls; joined with '|'
    // into a single `prop` parameter by params().
    props: Set<any>;
    params(): {
        pageids: any;
    } & {
        prop: string;
    };
    // Configure via the named chain method, fire the request, and resolve
    // with only that key of the combined response.
    direct(key: any, ...args: any[]): any;
    /**
     * Make combined API request
     * @method QueryChain#request
     * @returns {Object|Array} - Data object(s) depending on where the chain was created from
     */
    request(): Object | any[];
    // Record a prop and/or extra request params; returns this for chaining.
    chain(prop: any, params?: {}): QueryChain;
    /**
     * @summary Finds pages near a specific point
     * @method QueryChain#geosearch
     * @returns {QueryChain}
     */
    geosearch(latitude: any, longitude: any, radius: any): QueryChain;
    search(query: any, limit?: number): QueryChain;
    /**
     * @summary Useful for extracting structured section content
     * @method QueryChain#content
     * @returns {QueryChain}
     */
    content(): QueryChain;
    /**
     * @summary Useful for extracting summary content
     * @method QueryChain#summary
     * @returns {QueryChain}
     */
    summary(): QueryChain;
    /**
     * @summary Extract image
     * @method QueryChain#image
     * @returns {QueryChain}
     */
    image(types?: {
        thumbnail: boolean;
        original: boolean;
        name: boolean;
    }): QueryChain;
    /**
     * @summary Extract external links
     * @method QueryChain#extlinks
     * @returns {QueryChain}
     */
    extlinks(): QueryChain;
    /**
     * @summary Extract page links
     * @method QueryChain#links
     * @returns {QueryChain}
     */
    links(limit?: number): QueryChain;
    /**
     * @summary Extract categories
     * @method QueryChain#categories
     * @returns {QueryChain}
     */
    categories(limit?: number): QueryChain;
    /**
     * @summary Extract coordinates
     * @method QueryChain#coordinates
     * @returns {QueryChain}
     */
    coordinates(): QueryChain;
    /**
     * @summary Get list of links to different translations
     * @method QueryChain#langlinks
     * @returns {QueryChain}
     */
    langlinks(): QueryChain;
}
|
214
app/node_modules/wikijs/dist/cjs/chain.js
generated
vendored
Normal file
214
app/node_modules/wikijs/dist/cjs/chain.js
generated
vendored
Normal file
@@ -0,0 +1,214 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const util_js_1 = require("./util.js");
|
||||
// Per-prop post-processors: each one maps a raw API page object to the
// normalized fragment that gets merged into the final result.
const processors = {
    extracts: data => ({ extract: data.extract }),
    links: data => ({ links: data.links.map(e => e.title) }),
    extlinks: data => ({ extlinks: data.extlinks.map(e => e['*']) }),
    langlinks: data => ({
        langlinks: data.langlinks.map(link => ({
            lang: link.lang,
            title: link['*'],
            url: link.url
        }))
    }),
    coordinates: data => (data.coordinates ? { coordinates: data.coordinates[0] } : {}),
    categories: data => ({ categories: data.categories.map(e => e.title) }),
    pageimages: data => ({
        image: {
            name: data.pageimage,
            thumbnail: data.thumbnail,
            original: data.original
        }
    })
};
/**
 * Run every requested prop's processor over a raw page object and merge the
 * resulting fragments (plus the page title) into one flat result object.
 * Props without a registered processor are silently skipped.
 */
function process(props, rawPageData) {
    const result = { title: rawPageData.title };
    for (const prop of props) {
        const processor = processors[prop];
        if (processor) {
            Object.assign(result, processor(rawPageData));
        }
    }
    return result;
}
|
||||
/**
 * Chain API requests together
 * @example
 * // Get page summary and images in same request
 * wiki.page('batman').then(page => page.chain().summary().image().request()).then(console.log);
 * @namespace QueryChain
 */
class QueryChain {
    constructor(apiOptions, id) {
        this.id = id;
        this.apiOptions = apiOptions;
        this._params = { pageids: id };
        this.props = new Set();
    }
    /** Accumulated params plus the joined `prop` list for the combined call. */
    params() {
        const prop = Array.from(this.props).join('|');
        return Object.assign({}, this._params, { prop });
    }
    /** Configure via the named chain method, run the request, pluck that key. */
    direct(key, ...args) {
        const configured = this[key](...args);
        return configured.request().then(res => res[key]);
    }
    // TODO: Add page searches for root calls - generators
    // TODO: Add pagination helper method
    /**
     * Make combined API request
     * @method QueryChain#request
     * @returns {Object|Array} - Data object(s) depending on where the chain was created from
     */
    request() {
        const props = Array.from(this.props);
        // With a page id we return that single page; root chains return all pages.
        const pick = res => (this.id ? res.query.pages[this.id] : Object.values(res.query.pages));
        return (0, util_js_1.api)(this.apiOptions, this.params())
            .then(pick)
            .then(data => (Array.isArray(data) ? data.map(e => process(props, e)) : process(props, data)));
    }
    /** Record a prop and/or extra params; returns `this` so calls chain. */
    chain(prop, params = {}) {
        if (prop) {
            this.props.add(prop);
        }
        Object.assign(this._params, params);
        return this;
    }
    /**
     * @summary Finds pages near a specific point
     * @method QueryChain#geosearch
     * @returns {QueryChain}
     */
    geosearch(latitude, longitude, radius) {
        const params = {
            generator: 'geosearch',
            ggsradius: radius,
            ggscoord: `${latitude}|${longitude}`
        };
        return this.chain(undefined, params);
    }
    search(query, limit = 50) {
        const params = {
            list: 'search',
            srsearch: query,
            srlimit: limit
        };
        return this.chain(undefined, params);
    }
    /**
     * @summary Useful for extracting structured section content
     * @method QueryChain#content
     * @returns {QueryChain}
     */
    content() {
        return this.chain('extracts', { explaintext: '1' });
    }
    /**
     * @summary Useful for extracting summary content
     * @method QueryChain#summary
     * @returns {QueryChain}
     */
    summary() {
        return this.chain('extracts', { explaintext: '1', exintro: '1' });
    }
    /**
     * @summary Extract image
     * @method QueryChain#image
     * @returns {QueryChain}
     */
    image(types = { thumbnail: true, original: false, name: true }) {
        // Only the truthy type flags are requested from the API.
        const wanted = Object.keys(types).filter(key => types[key]);
        return this.chain('pageimages', { piprop: wanted.join('|') });
    }
    /**
     * @summary Extract external links
     * @method QueryChain#extlinks
     * @returns {QueryChain}
     */
    extlinks() {
        return this.chain('extlinks', { ellimit: 'max' });
    }
    /**
     * @summary Extract page links
     * @method QueryChain#links
     * @returns {QueryChain}
     */
    links(limit = 100) {
        return this.chain('links', { plnamespace: 0, pllimit: limit });
    }
    /**
     * @summary Extract categories
     * @method QueryChain#categories
     * @returns {QueryChain}
     */
    categories(limit = 100) {
        return this.chain('categories', { pllimit: limit });
    }
    /**
     * @summary Extract coordinates
     * @method QueryChain#coordinates
     * @returns {QueryChain}
     */
    coordinates() {
        return this.chain('coordinates');
    }
    /**
     * @summary Get list of links to different translations
     * @method QueryChain#langlinks
     * @returns {QueryChain}
     */
    langlinks() {
        return this.chain('langlinks', { lllimit: 'max', llprop: 'url' });
    }
}
|
||||
exports.default = QueryChain;
|
7
app/node_modules/wikijs/dist/cjs/coordinates.d.ts
generated
vendored
Normal file
7
app/node_modules/wikijs/dist/cjs/coordinates.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
/**
 * @ignore
 * @description Parses Coordinates for pages where the default Wiki Infobox Parser fails.
 * @param {Object} infoboxData - raw data object from Wiki Infobox Parser
 * @returns {Object} - formatted object containing coordinates, or null object if none.
 */
export function parseCoordinates(infoboxData: Object): Object;
|
106
app/node_modules/wikijs/dist/cjs/coordinates.js
generated
vendored
Normal file
106
app/node_modules/wikijs/dist/cjs/coordinates.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.parseCoordinates = void 0;
|
||||
/**
 * @ignore
 * @description Parses Coordinates for pages where the default Wiki Infobox Parser fails.
 * @param {Object} infoboxData - raw data object from Wiki Infobox Parser
 * @returns {Object} - formatted object containing coordinates, or null object if none.
 */
function parseCoordinates(infoboxData) {
    // Modern pages embed a {{coord|...}} template string in the infobox.
    const embedded = infoboxData.coordinates;
    if (embedded) {
        return parseInfoboxCoords(embedded);
    }
    // Older pages use the deprecated latd/longd infobox fields.
    if (infoboxData.latd && infoboxData.longd) {
        return parseDeprecatedCoords(infoboxData);
    }
    // Nothing usable on the page.
    return {
        lat: null,
        lon: null,
        error: 'No coordinates on page.'
    };
}
|
||||
exports.parseCoordinates = parseCoordinates;
|
||||
/**
 * @ignore
 * @description Parses coordinates which are in Wikipedia Deprecated Format.
 * @example
 * parseDeprecatedCoords('00 |latm=47 |lats=59 |latNS=S','100 |longm=39 |longs=58 |longEW=E');
 * @param {String} latString - Deprecated coordinate string for latitude (from latd property)
 * @param {String} lonString - Deprecated coordinate string for longitude (from longd property)
 * @returns {Object} - Wiki formatted object containing lat and lon
 */
function parseDeprecatedCoords(data) {
    // Convert one axis from its degree/minute/second fields; missing or
    // non-numeric pieces default to 0 via floatOrDefault.
    const toDecimal = (deg, min, sec, dir) =>
        dmsToDecimal(floatOrDefault(deg), floatOrDefault(min), floatOrDefault(sec), dir);
    const latitude = toDecimal(data.latd, data.latm, data.lats, data.latNs);
    const longitude = toDecimal(data.longd, data.longm, data.longs, data.longEw);
    return wikiCoordinates(latitude, longitude);
}
|
||||
// regex to match coordinate string in infobox, e.g. "38|54|N|16|36|E":
// groups 1-4 are latitude (deg, min, optional sec, N/S), groups 5-8 longitude.
const infoboxCoordinatePattern = /(\d{1,2})\|(\d{1,2})\|(\d{1,2})?\|?([NSEW])\|(\d{1,3})\|(\d{1,2})\|(\d{1,2})?\|?([NSEW])/;
/**
 * @ignore
 * @description Parses coordinates which are embedded in infobox instead of in the page.
 * @example
 * parseInfoboxCoord('{{coord|38|54|N|16|36|E|type:region:IT_type:city(94969)|display=inline}}')
 * @param {String} coord - coordinate string from infobox.
 * @return {Object} - Wiki formatted object containing lat and lon
 */
function parseInfoboxCoords(coord) {
    const matches = coord.match(infoboxCoordinatePattern);
    if (!matches) {
        // Robustness: previously a non-matching string crashed with a
        // TypeError; return the same "no coordinates" shape parseCoordinates uses.
        return { lat: null, lon: null, error: 'No coordinates on page.' };
    }
    // convertCoordinatesFromStrings() reads elements 1..4 of the array it is
    // given, so each slice must keep its direction letter at index 4.
    // BUG FIX: slice(0, 4) dropped the latitude's N/S letter (element 4 was
    // undefined), so southern latitudes silently lost their negative sign.
    const latitude = convertCoordinatesFromStrings(matches.slice(0, 5));
    const longitude = convertCoordinatesFromStrings(matches.slice(4));
    return wikiCoordinates(latitude, longitude);
}
/**
 * @ignore
 * @description Converts coordinates after they've been separated into components by regex matching.
 *              Missing or undefined elements in array will be treated as 0. Missing direction will
 *              result in positive coordinate.
 * @example
 * convertCoordinatesFromStrings(['38','54','23','N'])
 * @param {Array} matches - array in format [ignored,'degrees','minutes','seconds','direction']
 * @returns {Number} - coordinate in decimal form, with proper positive / negative sign applied.
 */
function convertCoordinatesFromStrings(matches) {
    return dmsToDecimal(floatOrDefault(matches[1]), floatOrDefault(matches[2]), floatOrDefault(matches[3]), matches[4]);
}
// simplifies positive / negative calculation in decimal conversion
const directions = { N: 1, S: -1, E: 1, W: -1 };
/**
 * @ignore
 * @description Converts coordinates from degrees, minutes, seconds, direction to decimal.
 * @example
 * dmsToDecimal(100,39,58,'W') == -100.6661111
 * @returns {Number} - coordinate in decimal form, with proper positive / negative sign applied.
 */
function dmsToDecimal(degrees, minutes, seconds, direction) {
    // Unknown/missing direction defaults to +1 (north/east).
    return ((degrees + (1 / 60) * minutes + (1 / 3600) * seconds) *
        (directions[direction] || 1));
}
/**
 * @ignore
 * @description Returns latitude and longitude in format Wikipedia Parser would do so.
 *              Rounds to 4 decimal places.
 * @param {Number} latitude - latitude in decimal form
 * @param {Number} longitude - longitude in decimal form
 * @returns {Object} - {lat: latitude, lon: longitude}
 */
function wikiCoordinates(latitude, longitude) {
    return {
        lat: Number(latitude.toFixed(4)),
        lon: Number(longitude.toFixed(4))
    };
}
/**
 * @ignore
 * @description Convert numeric string to Number or return 0 if not possible
 * @example
 * floatOrDefault("5") == 5; floatOrDefault(undefined) == 0;
 * @param {String} numStr - input number string (or undefined)
 * @returns {Number} - returns numStr converted to Number or 0 if NaN
 */
function floatOrDefault(numStr) {
    const num = Number(numStr);
    return !isNaN(num) ? num : 0;
}
|
3
app/node_modules/wikijs/dist/cjs/package.json
generated
vendored
Normal file
3
app/node_modules/wikijs/dist/cjs/package.json
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"type": "commonjs"
|
||||
}
|
5
app/node_modules/wikijs/dist/cjs/page.d.ts
generated
vendored
Normal file
5
app/node_modules/wikijs/dist/cjs/page.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
/**
 * WikiPage
 * Factory wrapping a raw API page object with query helper methods.
 * @namespace WikiPage
 */
export default function wikiPage(rawPageInfo: any, apiOptions: any): any;
|
457
app/node_modules/wikijs/dist/cjs/page.js
generated
vendored
Normal file
457
app/node_modules/wikijs/dist/cjs/page.js
generated
vendored
Normal file
@@ -0,0 +1,457 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const infobox_parser_1 = __importDefault(require("infobox-parser"));
|
||||
const hyntax_1 = require("hyntax");
|
||||
const util_js_1 = require("./util.js");
|
||||
const coordinates_js_1 = require("./coordinates.js");
|
||||
const chain_js_1 = __importDefault(require("./chain.js"));
|
||||
const get = (obj, first, ...rest) => {
|
||||
if (obj === undefined || first === undefined)
|
||||
return obj;
|
||||
if (typeof first === 'function') {
|
||||
return get(first(obj), ...rest);
|
||||
}
|
||||
return get(obj[first], ...rest);
|
||||
};
|
||||
const firstValue = obj => {
|
||||
if (typeof obj === 'object')
|
||||
return obj[Object.keys(obj)[0]];
|
||||
return obj[0];
|
||||
};
|
||||
const getFileName = text => {
|
||||
if (Array.isArray(text))
|
||||
text = text[0];
|
||||
if (!text)
|
||||
return undefined;
|
||||
if (text.indexOf(':') !== -1) {
|
||||
const [, name] = text.split(':');
|
||||
return name;
|
||||
}
|
||||
return text;
|
||||
};
|
||||
/**
 * WikiPage
 * Wraps a raw MediaWiki API page object with query helper methods. Returns a
 * plain object that carries the raw page fields plus the closures below.
 * @namespace WikiPage
 */
function wikiPage(rawPageInfo, apiOptions) {
    const raw = rawPageInfo;
    /**
     * HTML from page
     * @example
     * wiki.page('batman').then(page => page.html()).then(console.log);
     * @method WikiPage#html
     * @return {Promise}
     */
    function html() {
        return (0, util_js_1.api)(apiOptions, {
            prop: 'revisions',
            rvprop: 'content',
            rvlimit: 1,
            rvparse: '',
            titles: raw.title
        }).then(res => res.query.pages[raw.pageid].revisions[0]['*']);
    }
    /**
     * @summary Useful for extracting structured section content from the page
     * @alias sections
     * @example
     * wiki.page('batman').then(page => page.content()).then(console.log);
     * @method WikiPage#content
     * @return {Promise}
     */
    function content() {
        return rawContent().then(util_js_1.parseContent);
    }
    /**
     * Raw content from page
     * @example
     * wiki.page('batman').then(page => page.rawContent()).then(console.log);
     * @method WikiPage#rawContent
     * @return {Promise}
     */
    function rawContent() {
        return chain()
            .content()
            .request()
            .then(res => res.extract);
    }
    /**
     * Text summary from page
     * @example
     * wiki.page('batman').then(page => page.summary()).then(console.log);
     * @method WikiPage#summary
     * @return {Promise}
     */
    function summary() {
        return chain()
            .summary()
            .request()
            .then(res => res.extract);
    }
    /**
     * Main page image directly from API
     * @method WikiPage#pageImage
     * @returns URL
     */
    function pageImage() {
        return chain()
            .image({ original: true, name: true })
            .request()
            .then(res => get(res, 'image', 'original', 'source'));
    }
    /**
     * Raw data from images from page
     * @example
     * wiki.page('batman').then(page => page.rawImages()).then(console.log);
     * @method WikiPage#rawImages
     * @return {Promise}
     */
    function rawImages() {
        return (0, util_js_1.api)(apiOptions, {
            generator: 'images',
            gimlimit: 'max',
            prop: 'imageinfo',
            iiprop: 'url',
            titles: raw.title
        }).then(res => {
            if (res.query) {
                return Object.keys(res.query.pages).map(id => res.query.pages[id]);
            }
            // No images on the page.
            return [];
        });
    }
    /**
     * Main image URL from infobox on page
     * @example
     * wiki.page('batman').then(page => page.mainImage()).then(console.log);
     * @method WikiPage#mainImage
     * @return {Promise}
     */
    function mainImage() {
        return Promise.all([rawImages(), info()]).then(([images, info]) => {
            // Handle different translations of "image" here
            const mainImageName = getFileName(info.image ||
                info.bildname ||
                info.imagen ||
                info.Immagine ||
                info.badge ||
                info.logo);
            // Handle case where no info box exists
            if (!mainImageName) {
                return rawInfo().then(text => {
                    if (!images.length)
                        return undefined;
                    // Sort images by what is seen first in page's info text
                    images.sort((a, b) => text.indexOf(b.title) - text.indexOf(a.title));
                    const image = images[0];
                    const fallback = image && image.imageinfo.length > 0
                        ? image.imageinfo[0].url
                        : undefined;
                    // If no image could be found, fallback to page image api result
                    return pageImage().then(url => url || fallback);
                });
            }
            const image = images.find(({ title }) => {
                const filename = getFileName(title);
                // Some wikis use underscores for spaces, some don't
                return (filename.toUpperCase() === mainImageName.toUpperCase() ||
                    filename.replace(/\s/g, '_') === mainImageName);
            });
            const fallback = image && image.imageinfo.length > 0
                ? image.imageinfo[0].url
                : undefined;
            // If no image could be found, fallback to page image api result
            return pageImage().then(url => url || fallback);
        });
    }
    /**
     * Image URL's from page
     * @example
     * wiki.page('batman').then(page => page.image()).then(console.log);
     * @method WikiPage#images
     * @return {Promise}
     */
    function images() {
        return rawImages().then(images => {
            // Flatten every page's imageinfo list into a single array of URLs.
            return images
                .map(image => image.imageinfo)
                .reduce((imageInfos, list) => [...imageInfos, ...list], [])
                .map(info => info.url);
        });
    }
    /**
     * External links from page
     * @example
     * wiki().page('batman').then(page => page.externalLinks()).then(console.log);
     * // or
     * wiki().chain().search('batman').extlinks().request()
     * @method WikiPage#externalLinks
     * @return {Promise}
     */
    function externalLinks() {
        return chain().direct('extlinks');
    }
    // True when a hyntax AST node carries the given CSS class.
    function hasClass(node, className) {
        return (node.content.attributes &&
            node.content.attributes.some(attr => attr.key.content === 'class' &&
                attr.value.content.indexOf(className) !== -1));
    }
    // True when a hyntax AST node is an element (tag) node.
    function isTag(node) {
        return node.nodeType === 'tag';
    }
    // True when a hyntax tag node has the given tag name.
    function hasName(node, name) {
        return node.content.name === name;
    }
    // Depth-first search for the first AST node matching the predicate.
    function findNode(node, predicate) {
        if (predicate(node))
            return node;
        // search through children as well
        if (node.content.children) {
            for (let child of node.content.children) {
                const found = findNode(child, predicate);
                if (found) {
                    return found;
                }
            }
        }
        return null;
    }
    // Depth-first collection of all AST nodes matching the predicate into `nodes`.
    function findNodes(node, predicate, nodes) {
        if (predicate(node)) {
            nodes.push(node);
        }
        if (node.content.children) {
            for (let child of node.content.children) {
                findNodes(child, predicate, nodes);
            }
        }
    }
    /**
     * References from page
     * @example
     * wiki().page('batman').then(page => page.references()).then(console.log);
     * @method WikiPage#references
     * @return {Promise}
     */
    function references() {
        return html()
            .then(inputHTML => {
            const { tokens } = (0, hyntax_1.tokenize)(inputHTML);
            const { ast } = (0, hyntax_1.constructTree)(tokens);
            return ast;
        })
            .then(ast => {
            const links = [];
            const refs = [];
            // There can be multiple reference sections
            findNodes(ast, node => isTag(node) && hasName(node, 'ol') && hasClass(node, 'references'), refs);
            for (let ref of refs) {
                const items = ref.content.children.filter(el => isTag(el) && hasName(el, 'li') && el.content.children);
                for (let item of items) {
                    // The reference was moved under a span under li
                    // NOTE(review): assumes the span is always the third child — verify against rendered markup
                    const span = item.content.children[2];
                    const cite = findNode(span, node => isTag(node) && hasName(node, 'cite'));
                    if (cite) {
                        for (let el of cite.content.children) {
                            if (isTag(el) && hasName(el, 'a') && hasClass(el, 'external')) {
                                const linkAttr = el.content.attributes.find(attr => attr.key.content === 'href');
                                links.push(linkAttr.value.content);
                            }
                        }
                    }
                }
            }
            return links;
        });
    }
    /**
     * Paginated links from page
     * @example
     * wiki().page('batman').then(page => page.links()).then(console.log);
     * @method WikiPage#links
     * @param {Boolean} [aggregated] - return all links (default is true)
     * @param {Number} [limit] - number of links per page
     * @return {Promise} - returns results if aggregated [and next function for more results if not aggregated]
     */
    function links(aggregated = true, limit = 100) {
        const _pagination = (0, util_js_1.pagination)(apiOptions, {
            prop: 'links',
            plnamespace: 0,
            pllimit: limit,
            titles: raw.title
        }, res => (res.query.pages[raw.pageid].links || []).map(link => link.title));
        if (aggregated) {
            return (0, util_js_1.aggregatePagination)(_pagination);
        }
        return _pagination;
    }
    /**
     * Paginated categories from page
     * @example
     * wiki().page('batman').then(page => page.categories()).then(console.log);
     * @method WikiPage#categories
     * @param {Boolean} [aggregated] - return all categories (default is true)
     * @param {Number} [limit] - number of categories per page
     * @return {Promise} - returns results if aggregated [and next function for more results if not aggregated]
     */
    function categories(aggregated = true, limit = 100) {
        const _pagination = (0, util_js_1.pagination)(apiOptions, chain()
            .categories(limit)
            .params(), res => (res.query.pages[raw.pageid].categories || []).map(category => category.title));
        if (aggregated) {
            return (0, util_js_1.aggregatePagination)(_pagination);
        }
        return _pagination;
    }
    /**
     * Geographical coordinates from page
     * @example
     * wiki().page('Texas').then(texas => texas.coordinates())
     * @method WikiPage#coordinates
     * @return {Promise}
     */
    function coordinates() {
        return chain()
            .direct('coordinates')
            .then(coords => {
            if (coords)
                return coords;
            // No coordinates for this page, check infobox for deprecated version
            return info().then(data => (0, coordinates_js_1.parseCoordinates)(data));
        });
    }
    // Wikitext of section 0 (the lead/infobox section) for this page, or for
    // an explicitly given title.
    function rawInfo(title) {
        return (0, util_js_1.api)(apiOptions, {
            prop: 'revisions',
            rvprop: 'content',
            rvsection: 0,
            titles: title || raw.title
        }).then(res => get(res, 'query', 'pages', firstValue, 'revisions', 0, '*'));
    }
    /**
     * Fetch and parse tables within page
     * @method WikiPage#tables
     * @return {Promise} Resolves to a collection of tables
     */
    function tables() {
        return (0, util_js_1.api)(apiOptions, {
            prop: 'revisions',
            rvprop: 'content',
            titles: raw.title
        })
            .then(res => get(res, 'query', 'pages', firstValue, 'revisions', 0, '*'))
            .then(wikitext => (0, infobox_parser_1.default)(wikitext, apiOptions.parser).tables);
    }
    /**
     * Get general information from page, with optional specific property
     * @deprecated This method will be dropped and replaced with the `fullInfo` implementation in v5
     * @example
     * wiki().page('Batman').then(page => page.info('alter_ego'));
     * @method WikiPage#info
     * @param {String} [key] - Information key. Falsy keys are ignored
     * @return {Promise} - info Object contains key/value pairs of infobox data, or specific value if key given
     */
    function info(key) {
        return rawInfo()
            .then(wikitext => {
            // Use general data for now...
            const info = (0, infobox_parser_1.default)(wikitext, apiOptions.parser).general;
            if (Object.keys(info).length === 0) {
                // If empty, check to see if this page has a templated infobox
                return rawInfo(`Template:Infobox ${raw.title.toLowerCase()}`).then(_wikitext => (0, infobox_parser_1.default)(_wikitext || '', apiOptions.parser).general);
            }
            return info;
        })
            .then(metadata => {
            if (!key) {
                return metadata;
            }
            if (metadata.hasOwnProperty(key)) {
                return metadata[key];
            }
            // Unknown key resolves to undefined.
        });
    }
    /**
     * Get the full infobox data, parsed in a easy to use manner
     * @example
     * new Wiki().page('Batman').then(page => page.fullInfo()).then(info => info.general.aliases);
     * @method WikiPage#fullInfo
     * @return {Promise} - Parsed object of all infobox data
     */
    function fullInfo() {
        return rawInfo().then(wikitext => (0, infobox_parser_1.default)(wikitext, apiOptions.parser));
    }
    /**
     * Paginated backlinks from page
     * @method WikiPage#backlinks
     * @param {Boolean} [aggregated] - return all backlinks (default is true)
     * @param {Number} [limit] - number of backlinks per page
     * @return {Promise} - includes results [and next function for more results if not aggregated]
     */
    function backlinks(aggregated = true, limit = 100) {
        const _pagination = (0, util_js_1.pagination)(apiOptions, {
            list: 'backlinks',
            bllimit: limit,
            bltitle: raw.title
        }, res => (res.query.backlinks || []).map(link => link.title));
        if (aggregated) {
            return (0, util_js_1.aggregatePagination)(_pagination);
        }
        return _pagination;
    }
    /**
     * Get list of links to different translations
     * @method WikiPage#langlinks
     * @return {Promise} - includes link objects { lang, title, url }
     */
    function langlinks() {
        return chain().direct('langlinks');
    }
    /**
     * Get URL for wiki page
     * @method WikiPage#url
     * @return {String}
     */
    function url() {
        return raw.canonicalurl;
    }
    // The returned page object carries the raw API fields plus the methods below.
    const page = Object.assign({}, raw);
    /**
     * Returns a QueryChain for the page
     * @method WikiPage#chain
     * @returns {QueryChain}
     */
    function chain() {
        return new chain_js_1.default(apiOptions, raw.pageid);
    }
    Object.assign(page, {
        raw,
        html,
        rawContent,
        content,
        sections: content,
        summary,
        images,
        references,
        links,
        externalLinks,
        categories,
        coordinates,
        info,
        backlinks,
        rawImages,
        mainImage,
        langlinks,
        rawInfo,
        fullInfo,
        pageImage,
        tables,
        url,
        chain
    });
    return page;
}
exports.default = wikiPage;
|
9
app/node_modules/wikijs/dist/cjs/util.d.ts
generated
vendored
Normal file
9
app/node_modules/wikijs/dist/cjs/util.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
// Perform a MediaWiki API GET request, resolving with the parsed JSON body.
export function api(apiOptions: any, params?: {}): Promise<any>;
// Run a request and wrap results with a next() continuation when the
// response carries a `continue` token.
export function pagination(apiOptions: any, params: any, parseResults: any): Promise<{
    results: any;
    query: any;
    next(): Promise<any>;
}>;
// Follow next() links until exhausted, concatenating all results.
export function aggregatePagination(pagination: any, previousResults?: any[]): any;
export function aggregate(apiOptions: any, params: any, list: any, key: any, prefix: any, results?: any[]): any;
export function parseContent(source: any): never[];
|
160
app/node_modules/wikijs/dist/cjs/util.js
generated
vendored
Normal file
160
app/node_modules/wikijs/dist/cjs/util.js
generated
vendored
Normal file
@@ -0,0 +1,160 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.parseContent = exports.aggregate = exports.aggregatePagination = exports.pagination = exports.api = void 0;
|
||||
const cross_fetch_1 = __importDefault(require("cross-fetch"));
|
||||
const querystring_1 = __importDefault(require("querystring"));
|
||||
// Options applied to every outgoing fetch call.
const fetchOptions = {
    method: 'GET',
    mode: 'cors',
    credentials: 'omit'
};
/**
 * Perform a single MediaWiki API request.
 * Merges default query parameters, strips undefined values, and rejects on
 * HTTP failures or API-level error payloads.
 * @param {Object} apiOptions - { apiUrl, headers, origin }
 * @param {Object} [params] - query-string parameters for this request
 * @returns {Promise<Object>} parsed JSON response body
 */
function api(apiOptions, params = {}) {
    const query = Object.assign({
        format: 'json',
        action: 'query',
        redirects: '1'
    }, params);
    // Drop keys explicitly set to undefined so they are not serialized.
    for (const key of Object.keys(query)) {
        if (query[key] === undefined) {
            delete query[key];
        }
    }
    if (apiOptions.origin) {
        query.origin = apiOptions.origin;
    }
    const requestUrl = `${apiOptions.apiUrl}?${querystring_1.default.stringify(query)}`;
    const requestHeaders = Object.assign({ 'User-Agent': 'WikiJS Bot v1.0' }, apiOptions.headers);
    return (0, cross_fetch_1.default)(requestUrl, Object.assign({ headers: requestHeaders }, fetchOptions))
        .then(res => {
        if (!res.ok) {
            throw new Error(`${res.status}: ${res.statusText}`);
        }
        return res.json();
    })
        .then(body => {
        // The API can return 200 with an error payload; surface it as a rejection.
        if (body.error) {
            throw new Error(body.error.info);
        }
        return body;
    });
}
exports.api = api;
|
||||
/**
 * Run a query and wrap its parsed results, attaching a next() continuation
 * when the API signals more pages via a "continue" block.
 * @param {Object} apiOptions - api connection options
 * @param {Object} params - query parameters (mutated in place for continuation)
 * @param {Function} parseResults - extracts the result list from a raw response
 * @returns {Promise<{results, query, next?}>}
 */
function pagination(apiOptions, params, parseResults) {
    return api(apiOptions, params).then(res => {
        const resolution = {
            results: parseResults(res),
            query: params.srsearch
        };
        if (res['continue']) {
            // The continuation token key varies per list type; skip the
            // meta "continue" entry itself.
            const continueType = Object.keys(res['continue']).find(key => key !== 'continue');
            params[continueType] = res['continue'][continueType];
            resolution.next = () => pagination(apiOptions, params, parseResults);
        }
        return resolution;
    });
}
exports.pagination = pagination;
|
||||
function aggregatePagination(pagination, previousResults = []) {
|
||||
return pagination.then(res => {
|
||||
const results = [...previousResults, ...res.results];
|
||||
if (res.next) {
|
||||
return aggregatePagination(res.next(), results);
|
||||
}
|
||||
else {
|
||||
return results;
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.aggregatePagination = aggregatePagination;
|
||||
// Maximum page size requested per API round-trip.
const pageLimit = 500;
/**
 * Repeatedly query a list endpoint, following continuation markers until the
 * full list has been collected.
 * @param {Object} apiOptions - api connection options
 * @param {Object} params - query parameters (mutated in place for continuation)
 * @param {string} list - list module name (e.g. 'allpages')
 * @param {string} key - property to pluck from each list entry
 * @param {string} prefix - parameter prefix for this list (e.g. 'ap')
 * @param {Array} [results] - accumulator carried through recursion
 * @returns {Promise<Array>} every plucked value across all pages
 */
function aggregate(apiOptions, params, list, key, prefix, results = []) {
    params.list = list;
    params[prefix + 'limit'] = pageLimit;
    return api(apiOptions, params).then(res => {
        const collected = results.concat(res.query[list].map(entry => entry[key]));
        // Older wikis report continuation under 'query-continue'.
        const continueWith = res['query-continue'] || res.continue;
        if (!continueWith) {
            return collected;
        }
        const nextFromKey = (continueWith[list] && continueWith[list][prefix + 'from']) ||
            (continueWith[list] && continueWith[list][prefix + 'continue']) ||
            continueWith[prefix + 'continue'];
        // Set both forms; the API ignores whichever it does not use.
        params[prefix + 'continue'] = nextFromKey;
        params[prefix + 'from'] = nextFromKey;
        return aggregate(apiOptions, params, list, key, prefix, collected);
    });
}
exports.aggregate = aggregate;
|
||||
const headingPattern = /(==+)(?:(?!\n)\s?)((?:(?!==|\n)[^])+)(?:(?!\n)\s?)(==+)/g;
|
||||
function getHeadings(text) {
|
||||
let match;
|
||||
const matches = [];
|
||||
while ((match = headingPattern.exec(text)) !== null) {
|
||||
matches.push({
|
||||
level: match[1].trim().length,
|
||||
text: match[2].trim(),
|
||||
start: match.index,
|
||||
end: match.index + match[0].length
|
||||
});
|
||||
}
|
||||
return matches;
|
||||
}
|
||||
function parseContent(source) {
|
||||
const headings = getHeadings(source);
|
||||
const minLevel = Math.min(...headings.map(({ level }) => level));
|
||||
const sections = headings.map((heading, index) => {
|
||||
const next = headings[index + 1];
|
||||
const content = source
|
||||
.substring(heading.end, next ? next.start : undefined)
|
||||
.trim();
|
||||
return {
|
||||
title: heading.text,
|
||||
level: heading.level - minLevel,
|
||||
id: index,
|
||||
content,
|
||||
items: []
|
||||
};
|
||||
});
|
||||
const lastParentLevel = (index, level) => {
|
||||
if (level === 0)
|
||||
return null;
|
||||
for (let i = index - 1; i >= 0; i--) {
|
||||
if (sections[i].level < level) {
|
||||
return sections[i].id;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
// Set parents
|
||||
sections.forEach((section, index) => {
|
||||
section.parent = lastParentLevel(index, section.level);
|
||||
});
|
||||
const root = {
|
||||
items: []
|
||||
};
|
||||
const findSection = id => sections.find(s => id === s.id);
|
||||
// Organize
|
||||
sections.forEach(section => {
|
||||
if (section.parent === null) {
|
||||
root.items.push(section);
|
||||
}
|
||||
else {
|
||||
findSection(section.parent).items.push(section);
|
||||
}
|
||||
});
|
||||
// Clean up
|
||||
sections.forEach(section => {
|
||||
delete section.id;
|
||||
delete section.parent;
|
||||
delete section.level;
|
||||
if (!section.items.length) {
|
||||
delete section.items;
|
||||
}
|
||||
});
|
||||
return root.items;
|
||||
}
|
||||
exports.parseContent = parseContent;
|
9
app/node_modules/wikijs/dist/cjs/wiki.d.ts
generated
vendored
Normal file
9
app/node_modules/wikijs/dist/cjs/wiki.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
/**
 * wiki
 * Factory returning the wikijs client API bound to the given options.
 * @example
 * wiki({ apiUrl: 'http://fr.wikipedia.org/w/api.php' }).search(...);
 * @namespace Wiki
 * @param {Object} options
 * @return {Object} - wiki (for chaining methods)
 */
export default function wiki(options?: Object): Object;
|
325
app/node_modules/wikijs/dist/cjs/wiki.js
generated
vendored
Normal file
325
app/node_modules/wikijs/dist/cjs/wiki.js
generated
vendored
Normal file
@@ -0,0 +1,325 @@
|
||||
'use strict';
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const cross_fetch_1 = __importDefault(require("cross-fetch"));
|
||||
const querystring_1 = __importDefault(require("querystring"));
|
||||
const util_js_1 = require("./util.js");
|
||||
const page_js_1 = __importDefault(require("./page.js"));
|
||||
const chain_js_1 = __importDefault(require("./chain.js"));
|
||||
/**
 * @namespace
 * @constant
 * @property {string} apiUrl - URL of Wikipedia API
 * @property {string} headers - Headers to pass through to the API request
 * @property {string} origin - When accessing the API using a cross-domain AJAX
 * request (CORS), set this to the originating domain. This must be included in
 * any pre-flight request, and therefore must be part of the request URI (not
 * the POST body). This must match one of the origins in the Origin header
 * exactly, so it has to be set to something like https://en.wikipedia.org or
 * https://meta.wikimedia.org. If this parameter does not match the Origin
 * header, a 403 response will be returned. If this parameter matches the Origin
 * header and the origin is whitelisted, an Access-Control-Allow-Origin header
 * will be set.
 */
const defaultOptions = {
    apiUrl: 'http://en.wikipedia.org/w/api.php',
    origin: '*'
};
/**
 * wiki
 * @example
 * wiki({ apiUrl: 'http://fr.wikipedia.org/w/api.php' }).search(...);
 * @namespace Wiki
 * @param {Object} options
 * @return {Object} - wiki (for chaining methods)
 */
function wiki(options = {}) {
    if (this instanceof wiki) {
        // eslint-disable-next-line
        console.log('Please do not use wikijs ^1.0.0 as a class. Please see the new README.');
    }
    const apiOptions = Object.assign({}, defaultOptions, options);
    // When the API reports exactly one redirect, re-query for its target so
    // callers always receive the resolved page.
    function resolveRedirect(res) {
        const redirects = res.query.redirects;
        if (!redirects || redirects.length !== 1) {
            return res;
        }
        return (0, util_js_1.api)(apiOptions, {
            prop: 'info|pageprops',
            inprop: 'url',
            ppprop: 'disambiguation',
            titles: redirects[0].to
        });
    }
    // Shared lookup used by page() and findById(): fetch page info by the
    // given selector (titles or pageids), follow redirects, wrap as WikiPage.
    function fetchPage(selector) {
        return (0, util_js_1.api)(apiOptions, Object.assign({
            prop: 'info|pageprops',
            inprop: 'url',
            ppprop: 'disambiguation'
        }, selector))
            .then(resolveRedirect)
            .then(res => {
            const [id] = Object.keys(res.query.pages);
            if (!id || id === '-1') {
                throw new Error('No article found');
            }
            return (0, page_js_1.default)(res.query.pages[id], apiOptions);
        });
    }
    /**
     * Search articles
     * @example
     * wiki.search('star wars').then(data => console.log(data.results.length));
     * @method Wiki#search
     * @param {string} query - keyword query
     * @param {Number} [limit] - limits the number of results
     * @param {Boolean} [all] - returns entire article objects instead of just titles
     * @return {Promise} - pagination promise with results and next page function
     */
    function search(query, limit = 50, all = false) {
        const parseResults = res => res.query.search.map(article => (all ? article : article.title));
        return (0, util_js_1.pagination)(apiOptions, {
            list: 'search',
            srsearch: query,
            srlimit: limit
        }, parseResults).catch(err => {
            // Some wikis disable full-text search; fall back to opensearch.
            if (err.message === '"text" search is disabled.') {
                return opensearch(query, limit);
            }
            throw err;
        });
    }
    /**
     * Search articles using "fuzzy" prefixsearch
     * @method Wiki#prefixSearch
     * @param {string} query - keyword query
     * @param {Number} [limit] - limits the number of results
     * @return {Promise} - pagination promise with results and next page function
     */
    function prefixSearch(query, limit = 50) {
        return (0, util_js_1.pagination)(apiOptions, {
            list: 'prefixsearch',
            pslimit: limit,
            psprofile: 'fuzzy',
            pssearch: query
        }, res => res.query.prefixsearch.map(article => article.title));
    }
    /**
     * Opensearch (mainly used as a backup to normal text search)
     * @param {string} query - keyword query
     * @param {Number} limit - limits the number of results
     * @return {Array} List of page title results
     */
    function opensearch(query, limit = 50) {
        return (0, util_js_1.api)(apiOptions, {
            search: query,
            limit,
            namespace: 0,
            action: 'opensearch',
            redirects: undefined // opensearch rejects the default redirects flag
        }).then(res => res[1]);
    }
    /**
     * Random articles
     * @method Wiki#random
     * @param {Number} [limit] - limits the number of random articles
     * @return {Promise} - List of page titles
     */
    function random(limit = 1) {
        return (0, util_js_1.api)(apiOptions, {
            list: 'random',
            rnnamespace: 0,
            rnlimit: limit
        }).then(res => res.query.random.map(article => article.title));
    }
    /**
     * Get Page by title
     * @method Wiki#page
     * @param {string} title - title of article
     * @return {Promise}
     */
    function page(title) {
        return fetchPage({ titles: title });
    }
    /**
     * Get Page by PageId
     * @method Wiki#findById
     * @param {integer} pageid - id of the page
     * @return {Promise}
     */
    function findById(pageid) {
        return fetchPage({ pageids: pageid });
    }
    /**
     * Find page by query and optional predicate
     * @method Wiki#find
     * @param {string} query - search query
     * @param {function} [predicate] - picks which search result to fetch; defaults to the first
     * @return {Promise}
     */
    function find(query, predicate = results => results[0]) {
        return search(query)
            .then(res => predicate(res.results))
            .then(page);
    }
    /**
     * Geographical Search
     * @method Wiki#geoSearch
     * @param {Number} lat - latitude
     * @param {Number} lon - longitude
     * @param {Number} [radius=1000] - search radius in meters
     * @param {Number} [limit=10] - number of results
     * @return {Promise} - List of page titles
     */
    function geoSearch(lat, lon, radius = 1000, limit = 10) {
        return (0, util_js_1.api)(apiOptions, {
            list: 'geosearch',
            gsradius: radius,
            gscoord: `${lat}|${lon}`,
            gslimit: limit
        }).then(res => res.query.geosearch.map(article => article.title));
    }
    /**
     * @summary Find the most viewed pages with counts
     * @method Wiki#mostViewed
     * @returns {Promise} - Array of {title,count}
     */
    function mostViewed() {
        return (0, util_js_1.api)(apiOptions, {
            list: 'mostviewed'
        }).then(res => res.query.mostviewed.map(({ title, count }) => ({ title, count })));
    }
    /**
     * Fetch all page titles in wiki
     * @method Wiki#allPages
     * @return {Array} Array of pages
     */
    function allPages() {
        return (0, util_js_1.aggregate)(apiOptions, {}, 'allpages', 'title', 'ap');
    }
    /**
     * Fetch all categories in wiki
     * @method Wiki#allCategories
     * @return {Array} Array of categories
     */
    function allCategories() {
        return (0, util_js_1.aggregate)(apiOptions, {}, 'allcategories', '*', 'ac');
    }
    /**
     * Fetch all pages in category
     * @method Wiki#pagesInCategory
     * @param {String} category Category to fetch from
     * @return {Array} Array of pages
     */
    function pagesInCategory(category) {
        return (0, util_js_1.aggregate)(apiOptions, {
            cmtitle: category
        }, 'categorymembers', 'title', 'cm');
    }
    /**
     * @summary Helper function to query API directly
     * @method Wiki#api
     * @param {Object} params [https://www.mediawiki.org/wiki/API:Query](https://www.mediawiki.org/wiki/API:Query)
     * @returns {Promise} Query Response
     */
    function rawApi(params) {
        return (0, util_js_1.api)(apiOptions, params);
    }
    /**
     * @summary Returns a QueryChain to efficiently query specific data
     * @method Wiki#chain
     * @returns {QueryChain}
     */
    function chain() {
        return new chain_js_1.default(apiOptions);
    }
    /**
     * @summary Returns the Export XML for a page to be used for importing into another MediaWiki
     * @method Wiki#exportXml
     * @param {string} pageName
     * @returns {Promise<string>} Export XML
     */
    function exportXml(pageName) {
        const qs = {
            title: 'Special:Export',
            pages: pageName
        };
        // Special:Export is served by index.php rather than the API endpoint,
        // hence the (admittedly hacky) api -> index substitution.
        const url = `${apiOptions.apiUrl.replace('api', 'index')}?${querystring_1.default.stringify(qs)}`;
        const headers = Object.assign({ 'User-Agent': 'WikiJS Bot v1.0' }, apiOptions.headers);
        return (0, cross_fetch_1.default)(url, { headers }).then(res => res.text());
    }
    return {
        search,
        random,
        page,
        geoSearch,
        options,
        findById,
        find,
        allPages,
        allCategories,
        pagesInCategory,
        opensearch,
        prefixSearch,
        mostViewed,
        api: rawApi,
        chain,
        exportXml
    };
}
exports.default = wiki;
|
88
app/node_modules/wikijs/dist/mjs/chain.d.ts
generated
vendored
Normal file
88
app/node_modules/wikijs/dist/mjs/chain.d.ts
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
/**
 * Chain API requests together
 * @example
 * // Get page summary and images in same request
 * wiki.page('batman').then(page => page.chain().summary().image().request()).then(console.log);
 * @namespace QueryChain
 */
export default class QueryChain {
    constructor(apiOptions: any, id: any);
    /** Page id the chain is bound to (undefined for generator-based chains). */
    id: any;
    /** API connection options (apiUrl, headers, origin). */
    apiOptions: any;
    /** Query-string parameters accumulated by chained calls. */
    _params: {
        pageids: any;
    };
    /** Set of MediaWiki "prop" values to request in one combined call. */
    props: Set<any>;
    /** Combined query-string parameters for the pending request. */
    params(): {
        pageids: any;
    } & {
        prop: string;
    };
    /** Run a single chained prop and return just its slice of the response. */
    direct(key: any, ...args: any[]): any;
    /**
     * Make combined API request
     * @method QueryChain#request
     * @returns {Object|Array} - Data object(s) depending on where the chain was created from
     */
    request(): Object | any[];
    /** Register a prop and/or extra parameters; returns this for chaining. */
    chain(prop: any, params?: {}): QueryChain;
    /**
     * @summary Finds pages near a specific point
     * @method QueryChain#geosearch
     * @returns {QueryChain}
     */
    geosearch(latitude: any, longitude: any, radius: any): QueryChain;
    /** Full-text search used as a page generator for the chain. */
    search(query: any, limit?: number): QueryChain;
    /**
     * @summary Useful for extracting structured section content
     * @method QueryChain#content
     * @returns {QueryChain}
     */
    content(): QueryChain;
    /**
     * @summary Useful for extracting summary content
     * @method QueryChain#summary
     * @returns {QueryChain}
     */
    summary(): QueryChain;
    /**
     * @summary Extract image
     * @method QueryChain#image
     * @returns {QueryChain}
     */
    image(types?: {
        thumbnail: boolean;
        original: boolean;
        name: boolean;
    }): QueryChain;
    /**
     * @summary Extract external links
     * @method QueryChain#extlinks
     * @returns {QueryChain}
     */
    extlinks(): QueryChain;
    /**
     * @summary Extract page links
     * @method QueryChain#links
     * @returns {QueryChain}
     */
    links(limit?: number): QueryChain;
    /**
     * @summary Extract categories
     * @method QueryChain#categories
     * @returns {QueryChain}
     */
    categories(limit?: number): QueryChain;
    /**
     * @summary Extract coordinates
     * @method QueryChain#coordinates
     * @returns {QueryChain}
     */
    coordinates(): QueryChain;
    /**
     * @summary Get list of links to different translations
     * @method QueryChain#langlinks
     * @returns {QueryChain}
     */
    langlinks(): QueryChain;
}
|
211
app/node_modules/wikijs/dist/mjs/chain.js
generated
vendored
Normal file
211
app/node_modules/wikijs/dist/mjs/chain.js
generated
vendored
Normal file
@@ -0,0 +1,211 @@
|
||||
import { api } from './util.js';
|
||||
const processors = {
|
||||
extracts: data => {
|
||||
return { extract: data.extract };
|
||||
},
|
||||
links: data => {
|
||||
return { links: data.links.map(e => e.title) };
|
||||
},
|
||||
extlinks: data => {
|
||||
return { extlinks: data.extlinks.map(e => e['*']) };
|
||||
},
|
||||
langlinks: data => {
|
||||
return {
|
||||
langlinks: data.langlinks.map(link => {
|
||||
return {
|
||||
lang: link.lang,
|
||||
title: link['*'],
|
||||
url: link.url
|
||||
};
|
||||
})
|
||||
};
|
||||
},
|
||||
coordinates: data => {
|
||||
if (data.coordinates) {
|
||||
return { coordinates: data.coordinates[0] };
|
||||
}
|
||||
else {
|
||||
return {};
|
||||
}
|
||||
},
|
||||
categories: data => {
|
||||
return { categories: data.categories.map(e => e.title) };
|
||||
},
|
||||
pageimages: data => {
|
||||
return {
|
||||
image: {
|
||||
name: data.pageimage,
|
||||
thumbnail: data.thumbnail,
|
||||
original: data.original
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
function process(props, rawPageData) {
|
||||
const data = { title: rawPageData.title };
|
||||
return props.reduce((memo, prop) => {
|
||||
if (processors[prop])
|
||||
Object.assign(memo, processors[prop](rawPageData));
|
||||
return memo;
|
||||
}, data);
|
||||
}
|
||||
/**
|
||||
* Chain API requests together
|
||||
* @example
|
||||
* // Get page summary and images in same request
|
||||
* wiki.page('batman').then(page => page.chain().summary().image().request()).then(console.log);
|
||||
* @namespace QueryChain
|
||||
*/
|
||||
export default class QueryChain {
|
||||
constructor(apiOptions, id) {
|
||||
this.id = id;
|
||||
this.apiOptions = apiOptions;
|
||||
this._params = { pageids: id };
|
||||
this.props = new Set();
|
||||
}
|
||||
params() {
|
||||
const prop = [...this.props].join('|');
|
||||
return Object.assign({}, this._params, { prop });
|
||||
}
|
||||
direct(key, ...args) {
|
||||
return this[key](...args)
|
||||
.request()
|
||||
.then(res => res[key]);
|
||||
}
|
||||
// TODO: Add page searches for root calls - generators
|
||||
// TODO: Add pagination helper method
|
||||
/**
|
||||
* Make combined API request
|
||||
* @method QueryChain#request
|
||||
* @returns {Object|Array} - Data object(s) depending on where the chain was created from
|
||||
*/
|
||||
request() {
|
||||
const props = [...this.props];
|
||||
return api(this.apiOptions, this.params())
|
||||
.then(res => {
|
||||
if (this.id) {
|
||||
return res.query.pages[this.id];
|
||||
}
|
||||
else {
|
||||
return Object.values(res.query.pages);
|
||||
}
|
||||
})
|
||||
.then(data => {
|
||||
if (Array.isArray(data)) {
|
||||
return data.map(e => process(props, e));
|
||||
}
|
||||
else {
|
||||
return process(props, data);
|
||||
}
|
||||
});
|
||||
}
|
||||
chain(prop, params = {}) {
|
||||
if (prop) {
|
||||
this.props.add(prop);
|
||||
}
|
||||
Object.assign(this._params, params);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* @summary Finds pages near a specific point
|
||||
* @method QueryChain#geosearch
|
||||
* @returns {QueryChain}
|
||||
*/
|
||||
geosearch(latitude, longitude, radius) {
|
||||
return this.chain(undefined, {
|
||||
generator: 'geosearch',
|
||||
ggsradius: radius,
|
||||
ggscoord: `${latitude}|${longitude}`
|
||||
});
|
||||
}
|
||||
search(query, limit = 50) {
|
||||
return this.chain(undefined, {
|
||||
list: 'search',
|
||||
srsearch: query,
|
||||
srlimit: limit
|
||||
});
|
||||
}
|
||||
/**
|
||||
* @summary Useful for extracting structured section content
|
||||
* @method QueryChain#content
|
||||
* @returns {QueryChain}
|
||||
*/
|
||||
content() {
|
||||
return this.chain('extracts', {
|
||||
explaintext: '1'
|
||||
});
|
||||
}
|
||||
/**
|
||||
* @summary Useful for extracting summary content
|
||||
* @method QueryChain#summary
|
||||
* @returns {QueryChain}
|
||||
*/
|
||||
summary() {
|
||||
return this.chain('extracts', {
|
||||
explaintext: '1',
|
||||
exintro: '1'
|
||||
});
|
||||
}
|
||||
/**
|
||||
* @summary Extract image
|
||||
* @method QueryChain#image
|
||||
* @returns {QueryChain}
|
||||
*/
|
||||
image(types = { thumbnail: true, original: false, name: true }) {
|
||||
return this.chain('pageimages', {
|
||||
piprop: Object.keys(types)
|
||||
.filter(k => types[k])
|
||||
.join('|')
|
||||
});
|
||||
}
|
||||
/**
|
||||
* @summary Extract external links
|
||||
* @method QueryChain#extlinks
|
||||
* @returns {QueryChain}
|
||||
*/
|
||||
extlinks() {
|
||||
return this.chain('extlinks', {
|
||||
ellimit: 'max'
|
||||
});
|
||||
}
|
||||
/**
|
||||
* @summary Extract page links
|
||||
* @method QueryChain#links
|
||||
* @returns {QueryChain}
|
||||
*/
|
||||
links(limit = 100) {
|
||||
return this.chain('links', {
|
||||
plnamespace: 0,
|
||||
pllimit: limit
|
||||
});
|
||||
}
|
||||
/**
|
||||
* @summary Extract categories
|
||||
* @method QueryChain#categories
|
||||
* @returns {QueryChain}
|
||||
*/
|
||||
categories(limit = 100) {
|
||||
return this.chain('categories', {
|
||||
pllimit: limit
|
||||
});
|
||||
}
|
||||
/**
|
||||
* @summary Extract coordinates
|
||||
* @method QueryChain#coordinates
|
||||
* @returns {QueryChain}
|
||||
*/
|
||||
coordinates() {
|
||||
return this.chain('coordinates');
|
||||
}
|
||||
/**
|
||||
* @summary Get list of links to different translations
|
||||
* @method QueryChain#langlinks
|
||||
* @returns {QueryChain}
|
||||
*/
|
||||
langlinks() {
|
||||
return this.chain('langlinks', {
|
||||
lllimit: 'max',
|
||||
llprop: 'url'
|
||||
});
|
||||
}
|
||||
}
|
7
app/node_modules/wikijs/dist/mjs/coordinates.d.ts
generated
vendored
Normal file
7
app/node_modules/wikijs/dist/mjs/coordinates.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
/**
 * @ignore
 * @description Parses Coordinates for pages where the default Wiki Infobox Parser fails.
 * Handles both embedded {{coord|...}} templates and the deprecated latd/longd fields.
 * @param {Object} infoboxData - raw data object from Wiki Infobox Parser
 * @returns {Object} - formatted object containing coordinates, or null object if none.
 */
export function parseCoordinates(infoboxData: Object): Object;
|
102
app/node_modules/wikijs/dist/mjs/coordinates.js
generated
vendored
Normal file
102
app/node_modules/wikijs/dist/mjs/coordinates.js
generated
vendored
Normal file
@@ -0,0 +1,102 @@
|
||||
/**
|
||||
* @ignore
|
||||
* @description Parses Coordinates for pages where the default Wiki Infobox Parser fails.
|
||||
* @param {Object} infoboxData - raw data object from Wiki Infobox Parser
|
||||
* @returns {Object} - formatted object containing coordinates, or null object if none.
|
||||
*/
|
||||
export function parseCoordinates(infoboxData) {
|
||||
if (infoboxData.coordinates) {
|
||||
return parseInfoboxCoords(infoboxData.coordinates);
|
||||
}
|
||||
if (infoboxData.latd && infoboxData.longd) {
|
||||
return parseDeprecatedCoords(infoboxData);
|
||||
}
|
||||
return {
|
||||
lat: null,
|
||||
lon: null,
|
||||
error: 'No coordinates on page.'
|
||||
};
|
||||
}
|
||||
/**
|
||||
* @ignore
|
||||
* @description Parses coordinates which are in Wikipedia Deprecated Format.
|
||||
* @example
|
||||
* parseDeprecatedCoords('00 |latm=47 |lats=59 |latNS=S','100 |longm=39 |longs=58 |longEW=E');
|
||||
* @param {String} latString - Deprecated coordinate string for latitutde (from latd property)
|
||||
* @param {String} lonString - Deprecated coordinate string for longitude (from longd property)
|
||||
* @returns {Object} - Wiki formatted object containing lat and lon
|
||||
*/
|
||||
function parseDeprecatedCoords(data) {
|
||||
const latitude = dmsToDecimal(floatOrDefault(data.latd), floatOrDefault(data.latm), floatOrDefault(data.lats), data.latNs);
|
||||
const longitude = dmsToDecimal(floatOrDefault(data.longd), floatOrDefault(data.longm), floatOrDefault(data.longs), data.longEw);
|
||||
return wikiCoordinates(latitude, longitude);
|
||||
}
|
||||
// regex to match coordinate string in infobox
|
||||
const infoboxCoordinatePattern = /(\d{1,2})\|(\d{1,2})\|(\d{1,2})?\|?([NSEW])\|(\d{1,3})\|(\d{1,2})\|(\d{1,2})?\|?([NSEW])/;
|
||||
/**
|
||||
* @ignore
|
||||
* @description Parses coordinates which are embedded in infobox instead of in the page.
|
||||
* @example
|
||||
* parseInfoboxCoord('{{coord|38|54|N|16|36|E|type:region:IT_type:city(94969)|display=inline}}')
|
||||
* @param {String} coord - coordinate string from infobox.
|
||||
* @return {Object} - Wiki formatted object containing lat and lon
|
||||
*/
|
||||
function parseInfoboxCoords(coord) {
|
||||
let matches, latitude, longitude;
|
||||
matches = coord.match(infoboxCoordinatePattern);
|
||||
latitude = convertCoordinatesFromStrings(matches.slice(0, 4));
|
||||
longitude = convertCoordinatesFromStrings(matches.slice(4));
|
||||
return wikiCoordinates(latitude, longitude);
|
||||
}
|
||||
/**
|
||||
* @ignore
|
||||
* @description Converts coordinates after they've been separated into components by regex matching.
|
||||
* Missing or undefined elements in array will be treated as 0. Missing direction will
|
||||
* result in positive coordinate.
|
||||
* @example
|
||||
* convertCoordinatesFromStrings(['38','54','23','N'])
|
||||
* @param {Array} matches - array in format ['degrees','minutes','seconds','direction']
|
||||
* @returns {Number} - coordinate in decimal form, with proper positive / negative sign applied.
|
||||
*/
|
||||
function convertCoordinatesFromStrings(matches) {
|
||||
return dmsToDecimal(floatOrDefault(matches[1]), floatOrDefault(matches[2]), floatOrDefault(matches[3]), matches[4]);
|
||||
}
|
||||
// simplifies positive / negative calculation in decimal conversion
|
||||
const directions = { N: 1, S: -1, E: 1, W: -1 };
|
||||
/**
|
||||
* @ignore
|
||||
* @description Converts coordinates from degrees, minutes, seconds, direction to decimal.
|
||||
* @example
|
||||
* dmsToDecimal(100,39,58,'W') == -100.6661111
|
||||
* @returns {Number} - coordinate in decimal form, with proper positive / negative sign applied.
|
||||
*/
|
||||
function dmsToDecimal(degrees, minutes, seconds, direction) {
|
||||
return ((degrees + (1 / 60) * minutes + (1 / 3600) * seconds) *
|
||||
(directions[direction] || 1));
|
||||
}
|
||||
/**
|
||||
* @ignore
|
||||
* @description Returns latitude and longitude in format Wikipedia Parser would do so.
|
||||
* Rounds to 4 decimal places.
|
||||
* @param {Number} latitude - latitude in decimal form
|
||||
* @param {Number} longitude - longitude in decimal form
|
||||
* @returns {Object} - {lat: latitude, lon: longitude}
|
||||
*/
|
||||
function wikiCoordinates(latitude, longitude) {
|
||||
return {
|
||||
lat: Number(latitude.toFixed(4)),
|
||||
lon: Number(longitude.toFixed(4))
|
||||
};
|
||||
}
|
||||
/**
|
||||
* @ignore
|
||||
* @description Convert numeric string to Number or return 0 if not possible
|
||||
* @example
|
||||
* floatOrDefault("5") == 5; floatOrDefault(undefined) == 0;
|
||||
* @param {String} numStr - input number string (or undefined)
|
||||
* @returns {Number} - returns numStr converted to Number or 0 if NaN
|
||||
*/
|
||||
function floatOrDefault(numStr) {
|
||||
const num = Number(numStr);
|
||||
return !isNaN(num) ? num : 0;
|
||||
}
|
3
app/node_modules/wikijs/dist/mjs/package.json
generated
vendored
Normal file
3
app/node_modules/wikijs/dist/mjs/package.json
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"type": "module"
|
||||
}
|
5
app/node_modules/wikijs/dist/mjs/page.d.ts
generated
vendored
Normal file
5
app/node_modules/wikijs/dist/mjs/page.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
/**
 * WikiPage
 * Wraps a raw page-info object with lazy accessor methods (html, content,
 * summary, images, ...).
 * @namespace WikiPage
 */
export default function wikiPage(rawPageInfo: any, apiOptions: any): any;
|
451
app/node_modules/wikijs/dist/mjs/page.js
generated
vendored
Normal file
451
app/node_modules/wikijs/dist/mjs/page.js
generated
vendored
Normal file
@@ -0,0 +1,451 @@
|
||||
import infoboxParser from 'infobox-parser';
|
||||
import { tokenize, constructTree } from 'hyntax';
|
||||
import { aggregatePagination, pagination, api, parseContent } from './util.js';
|
||||
import { parseCoordinates } from './coordinates.js';
|
||||
import QueryChain from './chain.js';
|
||||
// Safely walk a path of keys (or transform functions) into an object.
// Walking stops early — returning the value reached so far — as soon as the
// current value or the next path step is undefined.
const get = (obj, ...path) => {
    let value = obj;
    for (const step of path) {
        if (value === undefined || step === undefined) {
            return value;
        }
        value = typeof step === 'function' ? step(value) : value[step];
    }
    return value;
};
|
||||
// First value of a container: for objects (including arrays) the value under
// the first enumerable key, otherwise (e.g. strings) the element at index 0.
const firstValue = obj => {
    if (typeof obj === 'object') {
        const [firstKey] = Object.keys(obj);
        return obj[firstKey];
    }
    return obj[0];
};
|
||||
// Strip a wiki namespace prefix ("File:", "Image:", ...) from a file reference.
// Accepts a string or an array (first element is used); falsy input yields
// undefined. Only the segment after the FIRST colon is kept.
const getFileName = text => {
    const value = Array.isArray(text) ? text[0] : text;
    if (!value) {
        return undefined;
    }
    return value.indexOf(':') !== -1 ? value.split(':')[1] : value;
};
|
||||
/**
 * WikiPage
 * Wraps a raw MediaWiki page-info record and returns it decorated with
 * query helper methods (html, summary, images, links, infobox parsing, ...).
 * @namespace WikiPage
 */
export default function wikiPage(rawPageInfo, apiOptions) {
    // Raw API page record (pageid, title, canonicalurl, ...). Exposed as `page.raw`.
    const raw = rawPageInfo;
    /**
     * HTML from page
     * @example
     * wiki.page('batman').then(page => page.html()).then(console.log);
     * @method WikiPage#html
     * @return {Promise}
     */
    function html() {
        return api(apiOptions, {
            prop: 'revisions',
            rvprop: 'content',
            rvlimit: 1,
            rvparse: '',
            titles: raw.title
        }).then(res => res.query.pages[raw.pageid].revisions[0]['*']);
    }
    /**
     * @summary Useful for extracting structured section content from the page
     * @alias sections
     * @example
     * wiki.page('batman').then(page => page.content()).then(console.log);
     * @method WikiPage#content
     * @return {Promise}
     */
    function content() {
        return rawContent().then(parseContent);
    }
    /**
     * Raw content from page
     * @example
     * wiki.page('batman').then(page => page.rawContent()).then(console.log);
     * @method WikiPage#rawContent
     * @return {Promise}
     */
    function rawContent() {
        return chain()
            .content()
            .request()
            .then(res => res.extract);
    }
    /**
     * Text summary from page
     * @example
     * wiki.page('batman').then(page => page.summary()).then(console.log);
     * @method WikiPage#summary
     * @return {Promise}
     */
    function summary() {
        return chain()
            .summary()
            .request()
            .then(res => res.extract);
    }
    /**
     * Main page image directly from API
     * @method WikiPage#pageImage
     * @returns URL
     */
    function pageImage() {
        return chain()
            .image({ original: true, name: true })
            .request()
            .then(res => get(res, 'image', 'original', 'source'));
    }
    /**
     * Raw data from images from page
     * @example
     * wiki.page('batman').then(page => page.rawImages()).then(console.log);
     * @method WikiPage#rawImages
     * @return {Promise}
     */
    function rawImages() {
        return api(apiOptions, {
            generator: 'images',
            gimlimit: 'max',
            prop: 'imageinfo',
            iiprop: 'url',
            titles: raw.title
        }).then(res => {
            if (res.query) {
                return Object.keys(res.query.pages).map(id => res.query.pages[id]);
            }
            // No `query` key in the response means the page has no images.
            return [];
        });
    }
    /**
     * Main image URL from infobox on page
     * @example
     * wiki.page('batman').then(page => page.mainImage()).then(console.log);
     * @method WikiPage#mainImage
     * @return {Promise}
     */
    function mainImage() {
        return Promise.all([rawImages(), info()]).then(([images, info]) => {
            // Handle different translations of "image" here
            const mainImageName = getFileName(info.image ||
                info.bildname ||
                info.imagen ||
                info.Immagine ||
                info.badge ||
                info.logo);
            // Handle case where no info box exists
            if (!mainImageName) {
                return rawInfo().then(text => {
                    if (!images.length)
                        return undefined;
                    // Sort images by what is seen first in page's info text
                    images.sort((a, b) => text.indexOf(b.title) - text.indexOf(a.title));
                    const image = images[0];
                    const fallback = image && image.imageinfo.length > 0
                        ? image.imageinfo[0].url
                        : undefined;
                    // If no image could be found, fallback to page image api result
                    return pageImage().then(url => url || fallback);
                });
            }
            const image = images.find(({ title }) => {
                const filename = getFileName(title);
                // Some wikis use underscores for spaces, some don't
                return (filename.toUpperCase() === mainImageName.toUpperCase() ||
                    filename.replace(/\s/g, '_') === mainImageName);
            });
            const fallback = image && image.imageinfo.length > 0
                ? image.imageinfo[0].url
                : undefined;
            // If no image could be found, fallback to page image api result
            return pageImage().then(url => url || fallback);
        });
    }
    /**
     * Image URL's from page
     * @example
     * wiki.page('batman').then(page => page.image()).then(console.log);
     * @method WikiPage#images
     * @return {Promise}
     */
    function images() {
        return rawImages().then(images => {
            // Flatten every imageinfo list into a single array of URLs.
            return images
                .map(image => image.imageinfo)
                .reduce((imageInfos, list) => [...imageInfos, ...list], [])
                .map(info => info.url);
        });
    }
    /**
     * External links from page
     * @example
     * wiki().page('batman').then(page => page.externalLinks()).then(console.log);
     * // or
     * wiki().chain().search('batman').extlinks().request()
     * @method WikiPage#externalLinks
     * @return {Promise}
     */
    function externalLinks() {
        return chain().direct('extlinks');
    }
    // True when an AST tag node carries the given CSS class in its class attribute.
    function hasClass(node, className) {
        return (node.content.attributes &&
            node.content.attributes.some(attr => attr.key.content === 'class' &&
                attr.value.content.indexOf(className) !== -1));
    }
    // True when the hyntax AST node is an element (tag) node.
    function isTag(node) {
        return node.nodeType === 'tag';
    }
    // True when the tag node's element name matches (e.g. 'ol', 'cite').
    function hasName(node, name) {
        return node.content.name === name;
    }
    // Depth-first search for the first AST node matching the predicate, or null.
    function findNode(node, predicate) {
        if (predicate(node))
            return node;
        // search through children as well
        if (node.content.children) {
            for (let child of node.content.children) {
                const found = findNode(child, predicate);
                if (found) {
                    return found;
                }
            }
        }
        return null;
    }
    // Depth-first collection of ALL AST nodes matching the predicate into `nodes`.
    function findNodes(node, predicate, nodes) {
        if (predicate(node)) {
            nodes.push(node);
        }
        if (node.content.children) {
            for (let child of node.content.children) {
                findNodes(child, predicate, nodes);
            }
        }
    }
    /**
     * References from page
     * @example
     * wiki().page('batman').then(page => page.references()).then(console.log);
     * @method WikiPage#references
     * @return {Promise}
     */
    function references() {
        return html()
            .then(inputHTML => {
            // Parse the rendered page HTML into a hyntax AST.
            const { tokens } = tokenize(inputHTML);
            const { ast } = constructTree(tokens);
            return ast;
        })
            .then(ast => {
            const links = [];
            const refs = [];
            // There can be multiple reference sections
            findNodes(ast, node => isTag(node) && hasName(node, 'ol') && hasClass(node, 'references'), refs);
            for (let ref of refs) {
                const items = ref.content.children.filter(el => isTag(el) && hasName(el, 'li') && el.content.children);
                for (let item of items) {
                    // The reference was moved under a span under li
                    // NOTE(review): assumes the span is always the third child — confirm against current MediaWiki markup.
                    const span = item.content.children[2];
                    const cite = findNode(span, node => isTag(node) && hasName(node, 'cite'));
                    if (cite) {
                        for (let el of cite.content.children) {
                            if (isTag(el) && hasName(el, 'a') && hasClass(el, 'external')) {
                                const linkAttr = el.content.attributes.find(attr => attr.key.content === 'href');
                                links.push(linkAttr.value.content);
                            }
                        }
                    }
                }
            }
            return links;
        });
    }
    /**
     * Paginated links from page
     * @example
     * wiki().page('batman').then(page => page.links()).then(console.log);
     * @method WikiPage#links
     * @param {Boolean} [aggregated] - return all links (default is true)
     * @param {Number} [limit] - number of links per page
     * @return {Promise} - returns results if aggregated [and next function for more results if not aggregated]
     */
    function links(aggregated = true, limit = 100) {
        const _pagination = pagination(apiOptions, {
            prop: 'links',
            plnamespace: 0,
            pllimit: limit,
            titles: raw.title
        }, res => (res.query.pages[raw.pageid].links || []).map(link => link.title));
        if (aggregated) {
            return aggregatePagination(_pagination);
        }
        return _pagination;
    }
    /**
     * Paginated categories from page
     * @example
     * wiki().page('batman').then(page => page.categories()).then(console.log);
     * @method WikiPage#categories
     * @param {Boolean} [aggregated] - return all categories (default is true)
     * @param {Number} [limit] - number of categories per page
     * @return {Promise} - returns results if aggregated [and next function for more results if not aggregated]
     */
    function categories(aggregated = true, limit = 100) {
        const _pagination = pagination(apiOptions, chain()
            .categories(limit)
            .params(), res => (res.query.pages[raw.pageid].categories || []).map(category => category.title));
        if (aggregated) {
            return aggregatePagination(_pagination);
        }
        return _pagination;
    }
    /**
     * Geographical coordinates from page
     * @example
     * wiki().page('Texas').then(texas => texas.coordinates())
     * @method WikiPage#coordinates
     * @return {Promise}
     */
    function coordinates() {
        return chain()
            .direct('coordinates')
            .then(coords => {
            if (coords)
                return coords;
            // No coordinates for this page, check infobox for deprecated version
            return info().then(data => parseCoordinates(data));
        });
    }
    // Fetch the raw wikitext of section 0 (the infobox/lede) for `title`,
    // defaulting to this page's title.
    function rawInfo(title) {
        return api(apiOptions, {
            prop: 'revisions',
            rvprop: 'content',
            rvsection: 0,
            titles: title || raw.title
        }).then(res => get(res, 'query', 'pages', firstValue, 'revisions', 0, '*'));
    }
    /**
     * Fetch and parse tables within page
     * @method WikiPage#tables
     * @return {Promise} Resolves to a collection of tables
     */
    function tables() {
        return api(apiOptions, {
            prop: 'revisions',
            rvprop: 'content',
            titles: raw.title
        })
            .then(res => get(res, 'query', 'pages', firstValue, 'revisions', 0, '*'))
            .then(wikitext => infoboxParser(wikitext, apiOptions.parser).tables);
    }
    /**
     * Get general information from page, with optional specifc property
     * @deprecated This method will be dropped and replaced with the `fullInfo` implementation in v5
     * @example
     * wiki().page('Batman').then(page => page.info('alter_ego'));
     * @method WikiPage#info
     * @param {String} [key] - Information key. Falsy keys are ignored
     * @return {Promise} - info Object contains key/value pairs of infobox data, or specific value if key given
     */
    function info(key) {
        return rawInfo()
            .then(wikitext => {
            // Use general data for now...
            const info = infoboxParser(wikitext, apiOptions.parser).general;
            if (Object.keys(info).length === 0) {
                // If empty, check to see if this page has a templated infobox
                return rawInfo(`Template:Infobox ${raw.title.toLowerCase()}`).then(_wikitext => infoboxParser(_wikitext || '', apiOptions.parser).general);
            }
            return info;
        })
            .then(metadata => {
            if (!key) {
                return metadata;
            }
            if (metadata.hasOwnProperty(key)) {
                return metadata[key];
            }
            // Unknown key: resolves to undefined (implicit return).
        });
    }
    /**
     * Get the full infobox data, parsed in a easy to use manner
     * @example
     * new Wiki().page('Batman').then(page => page.fullInfo()).then(info => info.general.aliases);
     * @method WikiPage#fullInfo
     * @return {Promise} - Parsed object of all infobox data
     */
    function fullInfo() {
        return rawInfo().then(wikitext => infoboxParser(wikitext, apiOptions.parser));
    }
    /**
     * Paginated backlinks from page
     * @method WikiPage#backlinks
     * @param {Boolean} [aggregated] - return all backlinks (default is true)
     * @param {Number} [limit] - number of backlinks per page
     * @return {Promise} - includes results [and next function for more results if not aggregated]
     */
    function backlinks(aggregated = true, limit = 100) {
        const _pagination = pagination(apiOptions, {
            list: 'backlinks',
            bllimit: limit,
            bltitle: raw.title
        }, res => (res.query.backlinks || []).map(link => link.title));
        if (aggregated) {
            return aggregatePagination(_pagination);
        }
        return _pagination;
    }
    /**
     * Get list of links to different translations
     * @method WikiPage#langlinks
     * @return {Promise} - includes link objects { lang, title, url }
     */
    function langlinks() {
        return chain().direct('langlinks');
    }
    /**
     * Get URL for wiki page
     * @method WikiPage#url
     * @return {String}
     */
    function url() {
        return raw.canonicalurl;
    }
    // The returned page keeps all raw API fields plus the helper methods below.
    const page = Object.assign({}, raw);
    /**
     * Returns a QueryChain for the page
     * @method WikiPage#chain
     * @returns {QueryChain}
     */
    function chain() {
        return new QueryChain(apiOptions, raw.pageid);
    }
    Object.assign(page, {
        raw,
        html,
        rawContent,
        content,
        sections: content,
        summary,
        images,
        references,
        links,
        externalLinks,
        categories,
        coordinates,
        info,
        backlinks,
        rawImages,
        mainImage,
        langlinks,
        rawInfo,
        fullInfo,
        pageImage,
        tables,
        url,
        chain
    });
    return page;
}
|
9
app/node_modules/wikijs/dist/mjs/util.d.ts
generated
vendored
Normal file
9
app/node_modules/wikijs/dist/mjs/util.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
// Low-level MediaWiki API request; resolves to the parsed JSON response.
export function api(apiOptions: any, params?: {}): Promise<any>;
// Single page of API results plus a `next()` continuation when more exist.
export function pagination(apiOptions: any, params: any, parseResults: any): Promise<{
    results: any;
    query: any;
    next(): Promise<any>;
}>;
// Follows a pagination chain to the end and returns all results in one array.
export function aggregatePagination(pagination: any, previousResults?: any[]): any;
// Exhausts a `list=` query using the API's continue protocol.
export function aggregate(apiOptions: any, params: any, list: any, key: any, prefix: any, results?: any[]): any;
// Parses raw wikitext extract into a nested section tree.
export function parseContent(source: any): never[];
|
149
app/node_modules/wikijs/dist/mjs/util.js
generated
vendored
Normal file
149
app/node_modules/wikijs/dist/mjs/util.js
generated
vendored
Normal file
@@ -0,0 +1,149 @@
|
||||
import fetch from 'cross-fetch';
|
||||
import querystring from 'querystring';
|
||||
// Base options applied to every API fetch; responses are read cross-origin
// without credentials.
const fetchOptions = {
    method: 'GET',
    mode: 'cors',
    credentials: 'omit'
};
/**
 * Perform a GET request against the configured MediaWiki API endpoint.
 * Defaults to a JSON-format `action=query` with redirect following; `params`
 * can override any of these defaults.
 * @param {Object} apiOptions - { apiUrl, origin, headers } configuration
 * @param {Object} [params] - querystring parameters merged over the defaults
 * @return {Promise} resolves to the parsed JSON body; rejects on non-2xx
 * status or when the API body carries an `error` field
 */
export function api(apiOptions, params = {}) {
    const qs = Object.assign({
        format: 'json',
        action: 'query',
        redirects: '1'
    }, params);
    // Remove undefined properties
    Object.keys(qs).forEach(key => {
        if (qs[key] === undefined) {
            delete qs[key];
        }
    });
    // CORS: origin must ride in the query string, not the request body.
    if (apiOptions.origin) {
        qs.origin = apiOptions.origin;
    }
    const url = `${apiOptions.apiUrl}?${querystring.stringify(qs)}`;
    const headers = Object.assign({ 'User-Agent': 'WikiJS Bot v1.0' }, apiOptions.headers);
    return fetch(url, Object.assign({ headers }, fetchOptions))
        .then(res => {
        if (res.ok) {
            return res.json();
        }
        throw new Error(`${res.status}: ${res.statusText}`);
    })
        .then(res => {
        // The API can return 200 with an error payload; surface it as a rejection.
        if (res.error) {
            throw new Error(res.error.info);
        }
        return res;
    });
}
|
||||
/**
 * Fetch one page of API results and expose a `next()` continuation when the
 * API reports more data.
 * NOTE: the continuation token is written back into the SAME `params` object,
 * so `next()` reuses (and mutates) it on every hop — callers should not share
 * `params` across independent paginations.
 * @param {Object} apiOptions - API configuration
 * @param {Object} params - query parameters; mutated with continue tokens
 * @param {Function} parseResults - maps the raw response to `results`
 * @return {Promise} resolves to { results, query, next? }
 */
export function pagination(apiOptions, params, parseResults) {
    return api(apiOptions, params).then(res => {
        let resolution = {};
        resolution.results = parseResults(res);
        // `query` is only meaningful for list=search requests (srsearch).
        resolution.query = params.srsearch;
        if (res['continue']) {
            // The continue object holds the bookkeeping key 'continue' plus one
            // request-specific token — pick the latter.
            const continueType = Object.keys(res['continue']).filter(key => key !== 'continue')[0];
            const continueKey = res['continue'][continueType];
            params[continueType] = continueKey;
            resolution.next = () => pagination(apiOptions, params, parseResults);
        }
        return resolution;
    });
}
|
||||
/**
 * Drain a pagination chain to completion, concatenating every page's results
 * into a single array.
 * @param {Promise} pagination - promise resolving to { results, next? }
 * @param {Array} [previousResults] - accumulator carried between hops
 * @return {Promise} resolves to the flat array of all results
 */
export function aggregatePagination(pagination, previousResults = []) {
    return pagination.then(page => {
        const collected = previousResults.concat(page.results);
        // Recurse while the API still offers a continuation.
        return page.next ? aggregatePagination(page.next(), collected) : collected;
    });
}
|
||||
// Maximum page size requested per API hop.
const pageLimit = 500;
/**
 * Exhaust a `list=` query via the API continue protocol, collecting one field
 * (`key`) from every returned entry.
 * NOTE: `params` is mutated with list/limit/continuation values across hops.
 * @param {Object} apiOptions - API configuration
 * @param {Object} params - base query parameters (mutated)
 * @param {String} list - list module name (e.g. 'allpages')
 * @param {String} key - property to pluck from each entry (e.g. 'title')
 * @param {String} prefix - the module's parameter prefix (e.g. 'ap')
 * @param {Array} [results] - accumulator carried between hops
 * @return {Promise} resolves to the full array of plucked values
 */
export function aggregate(apiOptions, params, list, key, prefix, results = []) {
    params.list = list;
    params[prefix + 'limit'] = pageLimit;
    return api(apiOptions, params).then(res => {
        const nextResults = [...results, ...res.query[list].map(e => e[key])];
        // Older MediaWiki versions report continuation under 'query-continue'.
        const continueWith = res['query-continue'] || res.continue;
        if (continueWith) {
            // The token's location varies by API version/module; try each shape.
            const nextFromKey = (continueWith[list] && continueWith[list][prefix + 'from']) ||
                (continueWith[list] && continueWith[list][prefix + 'continue']) ||
                continueWith[prefix + 'continue'];
            params[prefix + 'continue'] = nextFromKey;
            params[prefix + 'from'] = nextFromKey;
            return aggregate(apiOptions, params, list, key, prefix, nextResults);
        }
        return nextResults;
    });
}
|
||||
// Matches a wikitext heading: opening '='s, the title text, closing '='s.
const headingPattern = /(==+)(?:(?!\n)\s?)((?:(?!==|\n)[^])+)(?:(?!\n)\s?)(==+)/g;
// Collect every heading in the wikitext with its depth (number of '='s) and
// its character span within the source.
function getHeadings(text) {
    const matches = [];
    for (let match = headingPattern.exec(text); match !== null; match = headingPattern.exec(text)) {
        matches.push({
            level: match[1].trim().length,
            text: match[2].trim(),
            start: match.index,
            end: match.index + match[0].length
        });
    }
    return matches;
}
/**
 * Parse raw wikitext into a tree of sections: each node carries `title`,
 * `content`, and — when it has subsections — an `items` array.
 * @param {String} source - raw wikitext extract
 * @return {Array} top-level section nodes
 */
export function parseContent(source) {
    const headings = getHeadings(source);
    // Shallowest heading depth becomes level 0 in the output tree.
    const minLevel = Math.min(...headings.map(heading => heading.level));
    const sections = headings.map((heading, index) => {
        const nextHeading = headings[index + 1];
        // Body text runs from the end of this heading to the start of the next.
        const body = source
            .substring(heading.end, nextHeading ? nextHeading.start : undefined)
            .trim();
        return {
            title: heading.text,
            level: heading.level - minLevel,
            id: index,
            content: body,
            items: []
        };
    });
    // Scan backwards from `index` for the nearest shallower section's id.
    const lastParentLevel = (index, level) => {
        if (level === 0)
            return null;
        for (let i = index - 1; i >= 0; i--) {
            if (sections[i].level < level) {
                return sections[i].id;
            }
        }
        return null;
    };
    // Phase 1: record each section's parent id.
    sections.forEach((section, index) => {
        section.parent = lastParentLevel(index, section.level);
    });
    const root = { items: [] };
    const findSection = id => sections.find(s => id === s.id);
    // Phase 2: attach every section under its parent (or the root).
    sections.forEach(section => {
        const owner = section.parent === null ? root : findSection(section.parent);
        owner.items.push(section);
    });
    // Phase 3: strip the bookkeeping fields from the output shape.
    sections.forEach(section => {
        delete section.id;
        delete section.parent;
        delete section.level;
        if (!section.items.length) {
            delete section.items;
        }
    });
    return root.items;
}
|
9
app/node_modules/wikijs/dist/mjs/wiki.d.ts
generated
vendored
Normal file
9
app/node_modules/wikijs/dist/mjs/wiki.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
/**
 * wiki
 * Factory returning a client bound to one MediaWiki API endpoint.
 * @example
 * wiki({ apiUrl: 'http://fr.wikipedia.org/w/api.php' }).search(...);
 * @namespace Wiki
 * @param {Object} options
 * @return {Object} - wiki (for chaining methods)
 */
export default function wiki(options?: Object): Object;
|
320
app/node_modules/wikijs/dist/mjs/wiki.js
generated
vendored
Normal file
320
app/node_modules/wikijs/dist/mjs/wiki.js
generated
vendored
Normal file
@@ -0,0 +1,320 @@
|
||||
'use strict';
|
||||
import fetch from 'cross-fetch';
|
||||
import querystring from 'querystring';
|
||||
import { pagination, api, aggregate } from './util.js';
|
||||
import wikiPage from './page.js';
|
||||
import QueryChain from './chain.js';
|
||||
/**
 * @namespace
 * @constant
 * @property {string} apiUrl - URL of Wikipedia API
 * @property {string} headers - Headers to pass through to the API request
 * @property {string} origin - When accessing the API using a cross-domain AJAX
 * request (CORS), set this to the originating domain. This must be included in
 * any pre-flight request, and therefore must be part of the request URI (not
 * the POST body). This must match one of the origins in the Origin header
 * exactly, so it has to be set to something like https://en.wikipedia.org or
 * https://meta.wikimedia.org. If this parameter does not match the Origin
 * header, a 403 response will be returned. If this parameter matches the Origin
 * header and the origin is whitelisted, an Access-Control-Allow-Origin header
 * will be set.
 */
// NOTE(review): default endpoint uses plain http, not https — confirm callers
// override apiUrl when transport security matters.
const defaultOptions = {
    apiUrl: 'http://en.wikipedia.org/w/api.php',
    origin: '*'
};
|
||||
/**
 * wiki
 * Factory returning a client object bound to one MediaWiki API endpoint.
 * @example
 * wiki({ apiUrl: 'http://fr.wikipedia.org/w/api.php' }).search(...);
 * @namespace Wiki
 * @param {Object} options
 * @return {Object} - wiki (for chaining methods)
 */
export default function wiki(options = {}) {
    // v1+ is a factory, not a class; warn anyone still using `new wiki()`.
    if (this instanceof wiki) {
        // eslint-disable-next-line
        console.log('Please do not use wikijs ^1.0.0 as a class. Please see the new README.');
    }
    const apiOptions = Object.assign({}, defaultOptions, options);
    // When the API resolved a redirect, re-query page info for the target title.
    function handleRedirect(res) {
        if (res.query.redirects && res.query.redirects.length === 1) {
            return api(apiOptions, {
                prop: 'info|pageprops',
                inprop: 'url',
                ppprop: 'disambiguation',
                titles: res.query.redirects[0].to
            });
        }
        return res;
    }
    /**
     * Search articles
     * @example
     * wiki.search('star wars').then(data => console.log(data.results.length));
     * @example
     * wiki.search('star wars').then(data => {
     * 	data.next().then(...);
     * });
     * @method Wiki#search
     * @param {string} query - keyword query
     * @param {Number} [limit] - limits the number of results
     * @param {Boolean} [all] - returns entire article objects instead of just titles
     * @return {Promise} - pagination promise with results and next page function
     */
    function search(query, limit = 50, all = false) {
        return pagination(apiOptions, {
            list: 'search',
            srsearch: query,
            srlimit: limit
        }, res => res.query.search.map(article => {
            return all ? article : article.title;
        })).catch(err => {
            if (err.message === '"text" search is disabled.') {
                // Try backup search method
                return opensearch(query, limit);
            }
            throw err;
        });
    }
    /**
     * Search articles using "fuzzy" prefixsearch
     * @example
     * wiki.prefixSearch('star wars').then(data => console.log(data.results.length));
     * @example
     * wiki.prefixSearch('star wars').then(data => {
     * 	data.next().then(...);
     * });
     * @method Wiki#prefixSearch
     * @param {string} query - keyword query
     * @param {Number} [limit] - limits the number of results
     * @return {Promise} - pagination promise with results and next page function
     */
    function prefixSearch(query, limit = 50) {
        return pagination(apiOptions, {
            list: 'prefixsearch',
            pslimit: limit,
            psprofile: 'fuzzy',
            pssearch: query
        }, res => res.query.prefixsearch.map(article => article.title));
    }
    /**
     * Opensearch (mainly used as a backup to normal text search)
     * @param {string} query - keyword query
     * @param {Number} limit - limits the number of results
     * @return {Array} List of page title results
     */
    function opensearch(query, limit = 50) {
        return api(apiOptions, {
            search: query,
            limit,
            namespace: 0,
            action: 'opensearch',
            // opensearch does not accept the default redirects parameter.
            redirects: undefined
        }).then(res => res[1]);
    }
    /**
     * Random articles
     * @example
     * wiki.random(3).then(results => console.log(results[0]));
     * @method Wiki#random
     * @param {Number} [limit] - limits the number of random articles
     * @return {Promise} - List of page titles
     */
    function random(limit = 1) {
        return api(apiOptions, {
            list: 'random',
            rnnamespace: 0,
            rnlimit: limit
        }).then(res => res.query.random.map(article => article.title));
    }
    /**
     * Get Page
     * @example
     * wiki.page('Batman').then(page => console.log(page.pageid));
     * @method Wiki#page
     * @param {string} title - title of article
     * @return {Promise}
     */
    function page(title) {
        return api(apiOptions, {
            prop: 'info|pageprops',
            inprop: 'url',
            ppprop: 'disambiguation',
            titles: title
        })
            .then(handleRedirect)
            .then(res => {
            // A pageid of '-1' marks a missing page.
            const id = Object.keys(res.query.pages)[0];
            if (!id || id === '-1') {
                throw new Error('No article found');
            }
            return wikiPage(res.query.pages[id], apiOptions);
        });
    }
    /**
     * Get Page by PageId
     * @example
     * wiki.findById(4335).then(page => console.log(page.title));
     * @method Wiki#findById
     * @param {integer} pageid - id of the page
     * @return {Promise}
     */
    function findById(pageid) {
        return api(apiOptions, {
            prop: 'info|pageprops',
            inprop: 'url',
            ppprop: 'disambiguation',
            pageids: pageid
        })
            .then(handleRedirect)
            .then(res => {
            const id = Object.keys(res.query.pages)[0];
            if (!id || id === '-1') {
                throw new Error('No article found');
            }
            return wikiPage(res.query.pages[id], apiOptions);
        });
    }
    /**
     * Find page by query and optional predicate
     * @example
     * wiki.find('luke skywalker').then(page => console.log(page.title));
     * @method Wiki#find
     * @param {string} query - search query
     * @param {function} [predicate] - testing function for choosing which page result to fetch. Default is first result.
     * @return {Promise}
     */
    function find(query, predicate = results => results[0]) {
        return search(query)
            .then(res => predicate(res.results))
            .then(name => page(name));
    }
    /**
     * Geographical Search
     * @example
     * wiki.geoSearch(32.329, -96.136).then(titles => console.log(titles.length));
     * @method Wiki#geoSearch
     * @param {Number} lat - latitude
     * @param {Number} lon - longitude
     * @param {Number} [radius=1000] - search radius in meters (default: 1km)
     * @param {Number} [limit=10] - number of results (default: 10 results)
     * @return {Promise} - List of page titles
     */
    function geoSearch(lat, lon, radius = 1000, limit = 10) {
        return api(apiOptions, {
            list: 'geosearch',
            gsradius: radius,
            gscoord: `${lat}|${lon}`,
            gslimit: limit
        }).then(res => res.query.geosearch.map(article => article.title));
    }
    /**
     * @summary Find the most viewed pages with counts
     * @example
     * wiki.mostViewed().then(list => console.log(`${list[0].title}: ${list[0].count}`))
     * @method Wiki#mostViewed
     * @returns {Promise} - Array of {title,count}
     */
    function mostViewed() {
        return api(apiOptions, {
            list: 'mostviewed'
        }).then(res => {
            return res.query.mostviewed.map(({ title, count }) => ({ title, count }));
        });
    }
    /**
     * Fetch all page titles in wiki
     * @method Wiki#allPages
     * @return {Array} Array of pages
     */
    function allPages() {
        return aggregate(apiOptions, {}, 'allpages', 'title', 'ap');
    }
    /**
     * Fetch all categories in wiki
     * @method Wiki#allCategories
     * @return {Array} Array of categories
     */
    function allCategories() {
        return aggregate(apiOptions, {}, 'allcategories', '*', 'ac');
    }
    /**
     * Fetch all pages in category
     * @method Wiki#pagesInCategory
     * @param {String} category Category to fetch from
     * @return {Array} Array of pages
     */
    function pagesInCategory(category) {
        return aggregate(apiOptions, {
            cmtitle: category
        }, 'categorymembers', 'title', 'cm');
    }
    /**
     * @summary Helper function to query API directly
     * @method Wiki#api
     * @param {Object} params [https://www.mediawiki.org/wiki/API:Query](https://www.mediawiki.org/wiki/API:Query)
     * @returns {Promise} Query Response
     * @example
     * wiki().api({
     * 	action: 'parse',
     * 	page: 'Pet_door'
     * }).then(res => res.parse.title.should.equal('Pet door'));
     */
    function rawApi(params) {
        return api(apiOptions, params);
    }
    /**
     * @summary Returns a QueryChain to efficiently query specific data
     * @method Wiki#chain
     * @returns {QueryChain}
     * @example
     * // Find summaries and images of places near a specific location
     * wiki()
     * 	.chain()
     * 	.geosearch(52.52437, 13.41053)
     * 	.summary()
     * 	.image()
     * 	.coordinates()
     * 	.request()
     */
    function chain() {
        return new QueryChain(apiOptions);
    }
    /**
     * @summary Returns the Export XML for a page to be used for importing into another MediaWiki
     * @method Wiki#exportXml
     * @param {string} pageName
     * @returns {Promise<string>} Export XML
     */
    function exportXml(pageName) {
        const qs = {
            title: 'Special:Export',
            pages: pageName
        };
        // The replace here is kinda hacky since
        // the export action does not use
        // the normal api endpoint.
        const url = `${apiOptions.apiUrl.replace('api', 'index')}?${querystring.stringify(qs)}`;
        const headers = Object.assign({ 'User-Agent': 'WikiJS Bot v1.0' }, apiOptions.headers);
        return fetch(url, { headers }).then(res => res.text());
    }
    return {
        search,
        random,
        page,
        geoSearch,
        // NOTE(review): this exposes the caller's raw `options`, not the merged
        // `apiOptions` — defaults are not reflected here.
        options,
        findById,
        find,
        allPages,
        allCategories,
        pagesInCategory,
        opensearch,
        prefixSearch,
        mostViewed,
        api: rawApi,
        chain,
        exportXml
    };
}
|
Reference in New Issue
Block a user