Initial Commit
scripts/.gitignore (vendored, new file, +3)
@@ -0,0 +1,3 @@
/database/
/logs/
/data/
scripts/commands/api/load.sh (new file, +7)
@@ -0,0 +1,7 @@
#!/bin/bash

mkdir -p scripts/data
curl -L -o scripts/data/channels.json https://iptv-org.github.io/api/channels.json
curl -L -o scripts/data/countries.json https://iptv-org.github.io/api/countries.json
curl -L -o scripts/data/regions.json https://iptv-org.github.io/api/regions.json
curl -L -o scripts/data/subdivisions.json https://iptv-org.github.io/api/subdivisions.json
scripts/commands/api/update.js (new file, +31)
@@ -0,0 +1,31 @@
const { file, parser, logger } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')

const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'
const OUTPUT_DIR = process.env.OUTPUT_DIR || '.api'

async function main() {
  let guides = []

  const logPath = `${LOGS_DIR}/guides/update.log`
  const results = await parser.parseLogs(logPath)

  for (const result of results) {
    guides.push({
      channel: result.channel,
      site: result.site,
      lang: result.lang,
      days: result.days,
      url: `https://iptv-org.github.io/epg/guides/${result.filename}.xml`
    })
  }

  guides = _.sortBy(guides, 'channel')

  const outputFilepath = `${OUTPUT_DIR}/guides.json`
  await file.create(outputFilepath, JSON.stringify(guides))
  logger.info(`saved to "${outputFilepath}"...`)
}

main()
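For reference, each record the script above writes into .api/guides.json has the following shape; the values below are hypothetical, and the filename segment of the URL comes from the lang/site grouping produced by guides/update.js further down:

{
  "channel": "Example.us",
  "site": "example.com",
  "lang": "en",
  "days": 2,
  "url": "https://iptv-org.github.io/epg/guides/en/example.com.xml"
}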
scripts/commands/channels/editor.js (new file, +116)
@@ -0,0 +1,116 @@
const { api, parser, xml, file, logger } = require('../../core')
const { transliterate } = require('transliteration')
const nodeCleanup = require('node-cleanup')
const { program } = require('commander')
const inquirer = require('inquirer')

program
  .requiredOption('-i, --input <file>', 'Load channels from the file')
  .option('-c, --country <name>', 'Source country', 'us')
  .parse(process.argv)

const options = program.opts()
const defaultCountry = options.country
const newLabel = ` [new]`

let site
let channels = []

async function main() {
  let result = await parser.parseChannels(options.input)
  site = result.site
  channels = result.channels
  channels = channels.map(c => {
    c.xmltv_id = c.id
    return c
  })
  await api.channels.load()
  for (const channel of channels) {
    if (channel.xmltv_id) continue
    let choices = await getOptions(channel)
    const question = {
      name: 'option',
      message: `Choose an option:`,
      type: 'list',
      choices,
      pageSize: 10
    }
    await inquirer.prompt(question).then(async selected => {
      switch (selected.option) {
        case 'Overwrite...':
          const input = await getInput(channel)
          channel.xmltv_id = input.xmltv_id
          break
        case 'Skip...':
          channel.xmltv_id = '-'
          break
        default:
          const [name, xmltv_id] = selected.option
            .replace(/ \[.*\]/, '')
            .split('|')
            .map(i => i.trim().replace(newLabel, ''))
          channel.xmltv_id = xmltv_id
          break
      }
    })
  }
}

main()

function save() {
  const output = xml.create(channels, site)

  file.writeSync(options.input, output)

  logger.info(`\nFile '${options.input}' successfully saved`)
}

nodeCleanup(() => {
  save()
})

async function getInput(channel) {
  const name = channel.name.trim()
  const input = await inquirer.prompt([
    {
      name: 'xmltv_id',
      message: ' ID:',
      type: 'input',
      default: generateCode(name, defaultCountry)
    }
  ])

  return { name, xmltv_id: input['xmltv_id'] }
}

async function getOptions(channel) {
  const channels = await api.channels.all()
  const channelId = generateCode(channel.name, defaultCountry)
  const similar = await getSimilar(channels, channelId)
  let variants = []
  variants.push(`${channel.name.trim()} | ${channelId}${newLabel}`)
  similar.forEach(i => {
    let alt_names = i.alt_names.length ? ` (${i.alt_names.join(',')})` : ''
    let closed = i.closed ? `[closed:${i.closed}]` : ``
    let replaced_by = i.replaced_by ? `[replaced_by:${i.replaced_by}]` : ''
    variants.push(`${i.name}${alt_names} | ${i.id} ${closed}${replaced_by}[api]`)
  })
  variants.push(`Overwrite...`)
  variants.push(`Skip...`)

  return variants
}

async function getSimilar(list, channelId) {
  const normChannelId = channelId.split('.')[0].slice(0, 8).toLowerCase()
  return list.filter(i => i.id.split('.')[0].toLowerCase().startsWith(normChannelId))
}

function generateCode(name, country) {
  const id = transliterate(name)
    .replace(/\+/gi, 'Plus')
    .replace(/[^a-z\d]+/gi, '')

  return `${id}.${country}`
}
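A quick worked example of generateCode above (assuming the transliteration package maps Cyrillic as shown): the name is transliterated, '+' becomes 'Plus', remaining non-alphanumerics are stripped, and the country code is appended.

// illustrative inputs and outputs only
generateCode('Das Erste +', 'de') // => 'DasErstePlus.de'
generateCode('Канал 24', 'ua') // => 'Kanal24.ua'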
scripts/commands/channels/lint.js (new file, +76)
@@ -0,0 +1,76 @@
const chalk = require('chalk')
const libxml = require('libxmljs2')
const { program } = require('commander')
const { logger, file } = require('../../core')

const xsd = `<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
  <xs:element name="site">
    <xs:complexType>
      <xs:sequence>
        <xs:element ref="channels"/>
      </xs:sequence>
      <xs:attribute name="site" use="required" type="xs:string"/>
    </xs:complexType>
  </xs:element>
  <xs:element name="channels">
    <xs:complexType>
      <xs:sequence>
        <xs:element minOccurs="0" maxOccurs="unbounded" ref="channel"/>
      </xs:sequence>
    </xs:complexType>
  </xs:element>
  <xs:element name="channel">
    <xs:complexType mixed="true">
      <xs:attribute name="lang" use="required" type="xs:string"/>
      <xs:attribute name="site_id" use="required" type="xs:string"/>
      <xs:attribute name="xmltv_id" use="required" type="xs:string"/>
    </xs:complexType>
  </xs:element>
</xs:schema>`

program.argument('<filepath>', 'Path to file to validate').parse(process.argv)

async function main() {
  if (!program.args.length) {
    logger.error('required argument "filepath" not specified')
  }

  let errors = []

  for (const filepath of program.args) {
    if (!filepath.endsWith('.xml')) continue

    const xml = await file.read(filepath)

    let localErrors = []

    try {
      const xsdDoc = libxml.parseXml(xsd)
      const doc = libxml.parseXml(xml)

      if (!doc.validate(xsdDoc)) {
        localErrors = doc.validationErrors
      }
    } catch (error) {
      localErrors.push(error)
    }

    if (localErrors.length) {
      logger.info(`\n${chalk.underline(filepath)}`)
      localErrors.forEach(error => {
        const position = `${error.line}:${error.column}`
        logger.error(` ${chalk.gray(position.padEnd(4, ' '))} ${error.message.trim()}`)
      })

      errors = errors.concat(localErrors)
    }
  }

  if (errors.length) {
    logger.error(chalk.red(`\n${errors.length} error(s)`))
    process.exit(1)
  }
}

main()
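A minimal channels file that should pass the schema above (site and IDs are placeholders; note the schema declares lang, site_id and xmltv_id as required attributes):

<?xml version="1.0" encoding="UTF-8"?>
<site site="example.com">
  <channels>
    <channel lang="en" xmltv_id="Example.us" site_id="123">Example</channel>
  </channels>
</site>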
scripts/commands/channels/parse.js (new file, +48)
@@ -0,0 +1,48 @@
const { logger, file, xml } = require('../../core')
const { Command } = require('commander')
const path = require('path')
const _ = require('lodash')

const program = new Command()
program
  .requiredOption('-c, --config <config>', 'Config file')
  .option('-s, --set [args...]', 'Set custom arguments', [])
  .option('-o, --output <output>', 'Output file')
  .parse(process.argv)

const options = program.opts()

async function main() {
  const config = require(path.resolve(options.config))
  const args = {}
  options.set.forEach(arg => {
    const [key, value] = arg.split(':')
    args[key] = value
  })

  let channels = config.channels(args)
  if (isPromise(channels)) {
    channels = await channels
  }
  channels = channels.map(c => {
    c.lang = c.lang || 'en'

    return c
  })
  channels = _.sortBy(channels, ['lang', 'xmltv_id'])

  const dir = file.dirname(options.config)
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`

  const output = xml.create(channels, config.site)

  await file.write(outputFilepath, output)

  logger.info(`File '${outputFilepath}' successfully saved`)
}

main()

function isPromise(promise) {
  return !!promise && typeof promise.then === 'function'
}
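The config module loaded above is only expected to export a site name and a channels() function (plus the optional days and skip fields read by other commands in this commit); a minimal sketch with hypothetical values:

// example.com.config.js — a sketch, not a real site config
module.exports = {
  site: 'example.com',
  days: 2, // read later by queue/create.js and guides/update.js
  channels: async function (args) {
    // may return an array or a promise of one; each item should carry
    // lang (defaults to 'en'), xmltv_id, site_id and name
    return [{ lang: 'en', xmltv_id: 'Example.us', site_id: '123', name: 'Example' }]
  }
}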
scripts/commands/channels/validate.js (new file, +68)
@@ -0,0 +1,68 @@
const { parser, logger, api } = require('../../core')
const { program } = require('commander')
const chalk = require('chalk')
const langs = require('langs')

program.argument('<filepath>', 'Path to file to validate').parse(process.argv)

async function main() {
  await api.channels.load()

  const stats = {
    files: 0,
    errors: 0
  }

  if (!program.args.length) {
    logger.error('required argument "filepath" not specified')
  }

  for (const filepath of program.args) {
    if (!filepath.endsWith('.xml')) continue

    const { site, channels } = await parser.parseChannels(filepath)

    const bufferById = {}
    const bufferBySiteId = {}
    const errors = []
    for (const channel of channels) {
      if (!bufferById[channel.id + channel.lang]) {
        bufferById[channel.id + channel.lang] = channel
      } else {
        errors.push({ type: 'duplicate', xmltv_id: channel.id, ...channel })
        stats.errors++
      }

      if (!bufferBySiteId[channel.site_id + channel.lang]) {
        bufferBySiteId[channel.site_id + channel.lang] = channel
      } else {
        errors.push({ type: 'duplicate', xmltv_id: channel.id, ...channel })
        stats.errors++
      }

      if (!api.channels.find({ id: channel.id })) {
        errors.push({ type: 'wrong_xmltv_id', xmltv_id: channel.id, ...channel })
        stats.errors++
      }

      if (!langs.where('1', channel.lang)) {
        errors.push({ type: 'wrong_lang', xmltv_id: channel.id, ...channel })
        stats.errors++
      }
    }

    if (errors.length) {
      logger.info(chalk.underline(filepath))
      console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
      console.log()
      stats.files++
    }
  }

  if (stats.errors > 0) {
    logger.error(chalk.red(`${stats.errors} error(s) in ${stats.files} file(s)`))
    process.exit(1)
  }
}

main()
scripts/commands/cluster/load.js (new file, +89)
@@ -0,0 +1,89 @@
const _ = require('lodash')
const { EPGGrabber, Channel } = require('epg-grabber')
const { program } = require('commander')
const { db, logger, timer, file, parser } = require('../../core')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')

dayjs.extend(utc)

const options = program
  .requiredOption('-c, --cluster-id <cluster-id>', 'The ID of the cluster to load', parser.parseNumber)
  .option('--delay <delay>', 'Delay between requests (in milliseconds)', parser.parseNumber)
  .option(
    '-t, --timeout <timeout>',
    'Set a timeout for each request (in milliseconds)',
    parser.parseNumber
  )
  .option('--debug', 'Enable debug mode', false)
  .parse(process.argv)
  .opts()

const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'
const CLUSTER_PATH = `${LOGS_DIR}/cluster/load/cluster_${options.clusterId}.log`

async function main() {
  logger.info('Starting...')
  timer.start()

  logger.info(`Loading cluster: ${options.clusterId}`)
  logger.info(`Creating '${CLUSTER_PATH}'...`)
  await file.create(CLUSTER_PATH)
  await db.queue.load()
  let items = await db.queue.find({ cluster_id: options.clusterId })
  items = _.orderBy(items, [i => i.channel.id.toLowerCase(), 'date'])
  const total = items.length

  logger.info('Loading...')
  let i = 1
  let totalPrograms = 0
  let config = require(file.resolve(items[0].configPath))
  config = _.merge(config, {
    debug: options.debug,
    delay: options.delay,
    request: {
      timeout: options.timeout
    }
  })
  const grabber = new EPGGrabber(config)
  for (const item of items) {
    const channel = new Channel(item.channel)

    await new Promise(resolve => {
      grabber.grab(channel, item.date, async (data, err) => {
        logger.info(
          `[${i}/${total}] ${channel.site} (${channel.lang}) - ${channel.id} - ${dayjs
            .utc(data.date)
            .format('MMM D, YYYY')} (${data.programs.length} programs)`
        )

        if (err) logger.error(err.message)

        const result = {
          _qid: item._id,
          programs: data.programs,
          error: err ? err.message : null
        }

        await file.append(CLUSTER_PATH, JSON.stringify(result) + '\n')

        totalPrograms += data.programs.length

        if (i < total) i++

        resolve()
      })
    })
  }

  db.queue.compact()

  logger.info(`Done in ${timer.format('HH[h] mm[m] ss[s]')}`)

  if (totalPrograms === 0) {
    logger.error('\nError: No programs found')
    process.exit(1)
  }
}

main()
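Each grab callback above appends one JSON object per line to the cluster log, which parser.parseLogs and programs/save.js later re-read; a single record has roughly this shape (the _qid value is hypothetical, and the programs array holds whatever raw program objects the grabber returned):

{"_qid":"abc123","programs":[ ... ],"error":null}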
scripts/commands/database/load.js (new file, +115)
@@ -0,0 +1,115 @@
const { Octokit } = require('@octokit/core')
const dayjs = require('dayjs')
const isToday = require('dayjs/plugin/isToday')
const utc = require('dayjs/plugin/utc')
const unzipit = require('unzipit')
const { file, logger } = require('../../core')

dayjs.extend(isToday)
dayjs.extend(utc)

const DB_DIR = process.env.DB_DIR || './scripts/database'
const programsPath = `${DB_DIR}/programs.db`
const queuePath = `${DB_DIR}/queue.db`

const octokit = new Octokit({
  auth: process.env.GITHUB_TOKEN
})

async function main() {
  try {
    let workflows = await getWorkflows()
    logger.info(`found ${workflows.length} workflows\r\n`)

    await file.create(programsPath)
    await file.create(queuePath)
    const total = workflows.length
    for (let [i, workflow] of workflows.entries()) {
      logger.info(`[${i + 1}/${total}] ${workflow.name}`)
      const run = await getWorkflowRun(workflow)
      if (!run) continue

      let artifact = await getRunArtifacts(run)

      const programsBuffer = await downloadArtifact(artifact, 'programs.db')
      await file.append(programsPath, programsBuffer)

      const queueBuffer = await downloadArtifact(artifact, 'queue.db')
      await file.append(queuePath, queueBuffer)
    }
  } catch (err) {
    console.log(err.message)
  }
}

main()

async function downloadArtifact(artifact, filename) {
  let results = await octokit.request(
    'GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}',
    {
      owner: 'iptv-org',
      repo: 'epg',
      artifact_id: artifact.id,
      archive_format: 'zip'
    }
  )

  const { entries } = await unzipit.unzip(results.data)

  const arrayBuffer = await entries[filename].arrayBuffer()

  return toString(arrayBuffer)
}

async function getRunArtifacts(run) {
  let results = await octokit.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
    owner: 'iptv-org',
    repo: 'epg',
    run_id: run.id
  })

  return results.data.artifacts.find(a => a.name === 'database')
}

async function getWorkflowRun(workflow) {
  let today = dayjs.utc().subtract(1, 'd').format('YYYY-MM-DD')
  let results = await octokit.request(
    'GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs',
    {
      owner: 'iptv-org',
      repo: 'epg',
      workflow_id: workflow.id,
      status: 'success',
      created: `>=${today}`
    }
  )

  return results.data.workflow_runs.find(
    r => r.event === 'schedule' || r.event === 'workflow_dispatch'
  )
}

async function getWorkflows() {
  let workflows = []
  for (let page of [1, 2, 3]) {
    try {
      let results = await octokit.request('GET /repos/{owner}/{repo}/actions/workflows', {
        owner: 'iptv-org',
        repo: 'epg',
        per_page: 100,
        page
      })

      workflows = workflows.concat(results.data.workflows)
    } catch (err) {
      console.log(err.message)
    }
  }

  return workflows.filter(w => !/^_/.test(w.name) && w.name !== 'pages-build-deployment')
}

function toString(arrayBuffer) {
  return new TextDecoder().decode(arrayBuffer)
}
scripts/commands/guides/update.js (new file, +117)
@@ -0,0 +1,117 @@
const { db, api, logger, file, zip } = require('../../core')
const { generateXMLTV, Program, Channel } = require('epg-grabber')
const _ = require('lodash')

const PUBLIC_DIR = process.env.PUBLIC_DIR || '.gh-pages'
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'
const CURR_DATE = process.env.CURR_DATE || new Date()

const logPath = `${LOGS_DIR}/guides/update.log`

let api_channels = {}
let db_queue = []
let db_programs = []
let guides = []

async function main() {
  logger.info(`starting...`)

  logger.info('loading data/channels.json...')
  await api.channels.load()

  api.channels.all().forEach(channel => {
    api_channels[channel.id] = channel
  })

  logger.info('loading database/queue.db...')
  await db.queue.load()
  db_queue = await db.queue.find({})
  logger.info(`found ${db_queue.length} items`)

  logger.info('loading database/programs.db...')
  await db.programs.load()
  db_programs = await db.programs.find({})
  logger.info(`found ${db_programs.length} programs`)

  await generate()

  logger.info(`creating ${logPath}...`)
  await file.create(logPath, guides.map(g => JSON.stringify(g)).join('\r\n'))

  logger.info('finished')
}

main()

async function generate() {
  let queue = _.uniqBy(db_queue, i => i.channel.lang + i.channel.id + i.channel.site)
  queue = _.groupBy(queue, i => (i.channel ? `${i.channel.lang}/${i.channel.site}` : `_`))
  delete queue['_']

  let programs = _.groupBy(db_programs, p =>
    p.titles.length ? `${p.titles[0].lang}/${p.site}` : `_`
  )

  delete programs['_']

  for (let filename in queue) {
    if (!queue[filename]) continue
    const channels = queue[filename].map(i => {
      const channelData = api_channels[i.channel.id]
      channelData.site = i.channel.site
      channelData.site_id = i.channel.site_id
      channelData.lang = i.channel.lang

      return new Channel(channelData)
    })

    await save(filename, channels, programs[filename])

    for (let channel of channels) {
      const configPath = `sites/${channel.site}/${channel.site}.config.js`
      const config = require(file.resolve(configPath))

      guides.push({
        site: channel.site,
        lang: channel.lang,
        days: config.days,
        channel: channel.id,
        filename
      })
    }
  }
}

async function save(filepath, channels, programs = []) {
  let output = {
    channels,
    programs: [],
    date: CURR_DATE
  }

  for (let programData of programs) {
    let channel = channels.find(c => c.id === programData.channel)
    if (!channel) continue

    let program = new Program(programData, channel)

    output.programs.push(program)
  }

  output.channels = _.sortBy(output.channels, 'id')
  output.channels = _.uniqBy(output.channels, 'id')

  output.programs = _.sortBy(output.programs, ['channel', 'start'])
  output.programs = _.uniqBy(output.programs, p => p.channel + p.start)

  const xmlFilepath = `${PUBLIC_DIR}/guides/${filepath}.xml`
  const gzFilepath = `${PUBLIC_DIR}/guides/${filepath}.xml.gz`
  logger.info(`creating ${xmlFilepath}...`)
  const xmltv = generateXMLTV(output)
  await file.create(xmlFilepath, xmltv)
  logger.info(`creating ${gzFilepath}...`)
  const compressed = await zip.compress(xmltv)
  await file.create(gzFilepath, compressed)

  return output
}
scripts/commands/programs/save.js (new file, +35)
@@ -0,0 +1,35 @@
const { db, logger, file, parser } = require('../../core')
const { Program, Channel } = require('epg-grabber')
const _ = require('lodash')

const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'

async function main() {
  await db.queue.load()
  await db.programs.load()
  await db.programs.reset()
  const files = await file.list(`${LOGS_DIR}/cluster/load/cluster_*.log`)
  for (const filepath of files) {
    logger.info(`Parsing "${filepath}"...`)
    const results = await parser.parseLogs(filepath)
    for (const result of results) {
      const queue = await db.queue.find({ _id: result._qid }).limit(1)
      if (!queue.length) continue
      const item = queue[0]
      const c = new Channel(item.channel)
      const programs = result.programs.map(p => {
        p = new Program(p, c)
        p._qid = result._qid

        return p
      })
      await db.programs.insert(programs)

      await db.queue.update({ _id: result._qid }, { $set: { error: result.error } })
    }
  }

  await db.queue.compact()
}

main()
scripts/commands/queue/create.js (new file, +103)
@@ -0,0 +1,103 @@
const { db, file, parser, logger, date, api } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')

const options = program
  .option(
    '--max-clusters <max-clusters>',
    'Set maximum number of clusters',
    parser.parseNumber,
    256
  )
  .parse(process.argv)
  .opts()

const CHANNELS_PATH = process.env.CHANNELS_PATH || 'sites/**/*.channels.xml'
const CURR_DATE = process.env.CURR_DATE || new Date()

async function main() {
  logger.info('Starting...')
  logger.info(`Number of clusters: ${options.maxClusters}`)

  await saveToDatabase(await createQueue())

  logger.info('Done')
}

main()

async function createQueue() {
  logger.info(`Create queue...`)

  let queue = {}

  await api.channels.load().catch(console.error)
  const files = await file.list(CHANNELS_PATH).catch(console.error)
  const utcDate = date.getUTC(CURR_DATE)
  for (const filepath of files) {
    try {
      const dir = file.dirname(filepath)
      const { site, channels } = await parser.parseChannels(filepath)
      if (!site) continue
      const configPath = `${dir}/${site}.config.js`
      const config = require(file.resolve(configPath))
      if (config.skip) continue
      const filename = file.basename(filepath)
      const days = config.days || 1
      const dates = Array.from({ length: days }, (_, i) => utcDate.add(i, 'd'))
      for (const channel of channels) {
        if (!channel.site || !channel.id) continue
        const found = api.channels.find({ id: channel.id })
        if (!found) continue
        channel.logo = found.logo
        for (const d of dates) {
          const dString = d.toJSON()
          const key = `${channel.site}:${channel.lang}:${channel.id}:${dString}`
          if (!queue[key]) {
            queue[key] = {
              channel,
              date: dString,
              configPath,
              error: null
            }
          }
        }
      }
    } catch (err) {
      console.error(err)
      continue
    }
  }

  queue = Object.values(queue)

  logger.info(`Added ${queue.length} items`)

  return queue
}

async function saveToDatabase(items = []) {
  logger.info('Saving to the database...')
  await db.queue.load()
  await db.queue.reset()
  let queue = []
  const chunks = split(_.shuffle(items), options.maxClusters)
  for (const [i, chunk] of chunks.entries()) {
    for (const item of chunk) {
      item.cluster_id = i + 1
      queue.push(item)
    }
  }

  queue = _.sortBy(queue, ['channel.lang', 'channel.xmltv_id', 'date'])

  await db.queue.insert(queue)
}

function split(arr, n) {
  let result = []
  for (let i = n; i > 0; i--) {
    result.push(arr.splice(0, Math.ceil(arr.length / i)))
  }
  return result
}
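A worked example of split() above: it divides the shuffled queue into n chunks whose sizes differ by at most one.

// illustrative: 10 items into 4 clusters
split([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 4) // => [[1, 2, 3], [4, 5, 6], [7, 8], [9, 10]]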
scripts/commands/readme/update.js (new file, +86)
@@ -0,0 +1,86 @@
const { file, markdown, parser, logger, api, table } = require('../../core')
const { program } = require('commander')
const langs = require('langs')
const _ = require('lodash')

const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'

const options = program
  .option('-c, --config <config>', 'Set path to config file', '.readme/readme.json')
  .parse(process.argv)
  .opts()

async function main() {
  await api.countries.load().catch(console.error)
  const logPath = `${LOGS_DIR}/guides/update.log`
  let log = await parser.parseLogs(logPath)

  await createTable(log)

  await updateReadme()
}

main()

async function createTable(log) {
  let files = log.reduce((acc, curr) => {
    if (!acc[curr.filename]) {
      acc[curr.filename] = {
        site: curr.site,
        lang: curr.lang,
        channels: 0,
        filename: curr.filename
      }
    }

    acc[curr.filename].channels++

    return acc
  }, {})

  let groups = {}
  for (const filename in files) {
    const item = files[filename]
    const lang = langs.where('1', item.lang)

    if (!lang) continue

    if (!groups[lang.name]) groups[lang.name] = { lang: lang.name, data: [] }

    groups[lang.name].data.push([
      `<a href="https://${item.site}">${item.site}</a>`,
      item.channels,
      `<code>https://iptv-org.github.io/epg/guides/${filename}.xml</code>`,
      `<a href="https://github.com/iptv-org/epg/actions/workflows/${item.site}.yml"><img src="https://github.com/iptv-org/epg/actions/workflows/${item.site}.yml/badge.svg" alt="${item.site}" style="max-width: 100%;"></a>`
    ])
  }

  groups = _.sortBy(Object.values(groups), 'lang')

  let guides = ''
  for (let group of groups) {
    let lang = group.lang
    let data = group.data

    data = _.orderBy(data, [item => item[0], item => item[1]], ['asc', 'desc'])
    data = Object.values(_.groupBy(data, item => item[0]))

    guides += `### ${lang}\r\n\r\n`
    guides += table.create(data, [
      'Site ',
      'Channels',
      'EPG ',
      'Status '
    ])
    guides += `\r\n\r\n`
  }
  await file.create('./.readme/_guides.md', guides)
}

async function updateReadme() {
  logger.info('updating readme.md...')

  const config = require(file.resolve(options.config))
  await file.createDir(file.dirname(config.build))
  await markdown.compile(options.config)
}
scripts/core/api.js (new file, +32)
@@ -0,0 +1,32 @@
const _ = require('lodash')
const file = require('./file')

const DATA_DIR = process.env.DATA_DIR || './scripts/data'

class API {
  constructor(filepath) {
    this.filepath = file.resolve(filepath)
  }

  async load() {
    const data = await file.read(this.filepath)
    this.collection = JSON.parse(data)
  }

  find(query) {
    return _.find(this.collection, query)
  }

  all() {
    return this.collection
  }
}

const api = {}

api.channels = new API(`${DATA_DIR}/channels.json`)
api.regions = new API(`${DATA_DIR}/regions.json`)
api.countries = new API(`${DATA_DIR}/countries.json`)
api.subdivisions = new API(`${DATA_DIR}/subdivisions.json`)

module.exports = api
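A usage sketch matching how the commands above consume this module (the channel ID is hypothetical): load a collection once, then query it synchronously.

const api = require('./scripts/core/api')

async function demo() {
  await api.channels.load() // reads scripts/data/channels.json into memory
  const found = api.channels.find({ id: 'Example.us' }) // lodash-style match; undefined if absent
  console.log(found ? found.name : 'not found')
}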
scripts/core/date.js (new file, +13)
@@ -0,0 +1,13 @@
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
dayjs.extend(utc)

const date = {}

date.getUTC = function (d = null) {
  if (typeof d === 'string') return dayjs.utc(d).startOf('d')

  return dayjs.utc().startOf('d')
}

module.exports = date
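getUTC parses a date string to the start of that UTC day and falls back to the start of the current UTC day for any non-string input:

date.getUTC('2022-01-15').toJSON() // => '2022-01-15T00:00:00.000Z'
date.getUTC() // start of the current UTC day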
scripts/core/db.js (new file, +76)
@@ -0,0 +1,76 @@
const nedb = require('nedb-promises')
const file = require('./file')

const DB_DIR = process.env.DB_DIR || './scripts/database'

class Database {
  constructor(filepath) {
    this.filepath = filepath
  }

  load() {
    this.db = nedb.create({
      filename: file.resolve(this.filepath),
      autoload: true,
      onload: err => {
        if (err) console.error(err)
      },
      compareStrings: (a, b) => {
        a = a.replace(/\s/g, '_')
        b = b.replace(/\s/g, '_')

        return a.localeCompare(b, undefined, {
          sensitivity: 'accent',
          numeric: true
        })
      }
    })
  }

  removeIndex(field) {
    return this.db.removeIndex(field)
  }

  addIndex(options) {
    return this.db.ensureIndex(options)
  }

  compact() {
    return this.db.persistence.compactDatafile()
  }

  stopAutocompact() {
    return this.db.persistence.stopAutocompaction()
  }

  reset() {
    return file.clear(this.filepath)
  }

  count(query) {
    return this.db.count(query)
  }

  insert(doc) {
    return this.db.insert(doc)
  }

  update(query, update) {
    return this.db.update(query, update)
  }

  find(query) {
    return this.db.find(query)
  }

  remove(query, options) {
    return this.db.remove(query, options)
  }
}

const db = {}

db.queue = new Database(`${DB_DIR}/queue.db`)
db.programs = new Database(`${DB_DIR}/programs.db`)

module.exports = db
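A usage sketch under the same assumptions as the commands above (the cluster ID is hypothetical): load a datastore, then query and update it through the thin wrapper.

const db = require('./scripts/core/db')

async function demo() {
  db.queue.load() // creates/loads scripts/database/queue.db
  const items = await db.queue.find({ cluster_id: 1 })
  // assuming the queue is non-empty:
  await db.queue.update({ _id: items[0]._id }, { $set: { error: null } })
  await db.queue.compact() // rewrites the append-only datafile
}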
scripts/core/file.js (new file, +72)
@@ -0,0 +1,72 @@
const path = require('path')
const glob = require('glob')
const fs = require('fs-extra')

const file = {}

file.list = function (pattern) {
  return new Promise(resolve => {
    glob(pattern, function (err, files) {
      resolve(files)
    })
  })
}

file.getFilename = function (filepath) {
  return path.parse(filepath).name
}

file.createDir = async function (dir) {
  if (await file.exists(dir)) return

  return fs.mkdir(dir, { recursive: true }).catch(console.error)
}

file.exists = function (filepath) {
  return fs.exists(path.resolve(filepath))
}

file.read = function (filepath) {
  return fs.readFile(path.resolve(filepath), { encoding: 'utf8' }).catch(console.error)
}

file.append = function (filepath, data) {
  return fs.appendFile(path.resolve(filepath), data).catch(console.error)
}

file.create = function (filepath, data = '') {
  filepath = path.resolve(filepath)
  const dir = path.dirname(filepath)

  return file
    .createDir(dir)
    .then(() => file.write(filepath, data))
    .catch(console.error)
}

file.write = function (filepath, data = '') {
  return fs.writeFile(path.resolve(filepath), data, { encoding: 'utf8' }).catch(console.error)
}

file.writeSync = function (filepath, data = '') {
  return fs.writeFileSync(path.resolve(filepath), data, { encoding: 'utf8' })
}

file.clear = async function (filepath) {
  if (await file.exists(filepath)) return file.write(filepath, '')
  return true
}

file.resolve = function (filepath) {
  return path.resolve(filepath)
}

file.dirname = function (filepath) {
  return path.dirname(filepath)
}

file.basename = function (filepath) {
  return path.basename(filepath)
}

module.exports = file
scripts/core/index.js (new file, +11)
@@ -0,0 +1,11 @@
exports.db = require('./db')
exports.logger = require('./logger')
exports.file = require('./file')
exports.parser = require('./parser')
exports.timer = require('./timer')
exports.markdown = require('./markdown')
exports.api = require('./api')
exports.date = require('./date')
exports.table = require('./table')
exports.xml = require('./xml')
exports.zip = require('./zip')
scripts/core/logger.js (new file, +19)
@@ -0,0 +1,19 @@
const { Signale } = require('signale')

const options = {}

const logger = new Signale(options)

logger.config({
  displayLabel: false,
  displayScope: false,
  displayBadge: false
})

logger.memoryUsage = function () {
  const used = process.memoryUsage().heapUsed / 1024 / 1024

  logger.info(`memory: ${Math.round(used * 100) / 100} MB`)
}

module.exports = logger
scripts/core/markdown.js (new file, +10)
@@ -0,0 +1,10 @@
const markdownInclude = require('markdown-include')
const file = require('./file')

const markdown = {}

markdown.compile = function (filepath) {
  markdownInclude.compileFiles(file.resolve(filepath))
}

module.exports = markdown
scripts/core/parser.js (new file, +29)
@@ -0,0 +1,29 @@
const file = require('./file')
const grabber = require('epg-grabber')

const parser = {}

parser.parseChannels = async function (filepath) {
  const content = await file.read(filepath)

  return grabber.parseChannels(content)
}

parser.parseLogs = async function (filepath) {
  const content = await file.read(filepath)
  if (!content) return []
  const lines = content.split('\n')

  return lines.map(line => (line ? JSON.parse(line) : null)).filter(l => l)
}

parser.parseNumber = function (string) {
  const parsed = parseInt(string)
  if (isNaN(parsed)) {
    throw new Error('scripts/core/parser.js:parseNumber() Input value is not a number')
  }

  return parsed
}

module.exports = parser
scripts/core/table.js (new file, +47)
@@ -0,0 +1,47 @@
const table = {}

table.create = function (data, cols) {
  let output = '<table>\r\n'

  output += ' <thead>\r\n <tr>'
  for (let column of cols) {
    output += `<th align="left">${column}</th>`
  }
  output += '</tr>\r\n </thead>\r\n'

  output += ' <tbody>\r\n'
  output += getHTMLRows(data)
  output += ' </tbody>\r\n'

  output += '</table>'

  return output
}

function getHTMLRows(data) {
  let output = ''
  for (let group of data) {
    let rowspan = group.length
    for (let [j, row] of group.entries()) {
      output += ' <tr>'
      for (let [i, value] of row.entries()) {
        if (i === 0 && j === 0) {
          output += `<td valign="top" rowspan="${rowspan}">${value}</td>`
        } else if (i > 0) {
          if (typeof value === 'number') {
            output += `<td align="right" nowrap>${value}</td>`
          } else {
            output += `<td nowrap>${value}</td>`
          }
        }
      }
      output += '</tr>\r\n'
    }
  }

  return output
}

function getSpan() {}

module.exports = table
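table.create expects data already grouped the way readme/update.js prepares it: an array of row groups, where the first cell of each group is rendered once with a rowspan covering the group. An illustrative call:

const rows = [
  [['example.com', 10, 'guide-a.xml']], // group of one row
  [
    ['other.com', 5, 'guide-b.xml'],
    ['other.com', 2, 'guide-c.xml'] // first cell of later rows is skipped; 'other.com' spans both
  ]
]
table.create(rows, ['Site', 'Channels', 'EPG'])
// => '<table>...<td valign="top" rowspan="2">other.com</td>...</table>'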
scripts/core/timer.js (new file, +29)
@@ -0,0 +1,29 @@
const { performance } = require('perf_hooks')
const dayjs = require('dayjs')
const duration = require('dayjs/plugin/duration')
const relativeTime = require('dayjs/plugin/relativeTime')

dayjs.extend(relativeTime)
dayjs.extend(duration)

const timer = {}

let t0 = 0

timer.start = function () {
  t0 = performance.now()
}

timer.format = function (f) {
  let t1 = performance.now()

  return dayjs.duration(t1 - t0).format(f)
}

timer.humanize = function (suffix = true) {
  let t1 = performance.now()

  return dayjs.duration(t1 - t0).humanize(suffix)
}

module.exports = timer
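Usage mirrors cluster/load.js: start the timer once, then format or humanize the elapsed time (sample outputs illustrative).

timer.start()
// ... do some work ...
timer.format('HH[h] mm[m] ss[s]') // e.g. '00h 01m 42s'
timer.humanize() // e.g. 'in a few minutes'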
scripts/core/xml.js (new file, +49)
@@ -0,0 +1,49 @@
const xml = {}

xml.create = function (items, site) {
  let output = `<?xml version="1.0" encoding="UTF-8"?>\r\n<site site="${site}">\r\n  <channels>\r\n`

  items.forEach(channel => {
    const logo = channel.logo ? ` logo="${channel.logo}"` : ''
    const xmltv_id = channel.xmltv_id || ''
    const lang = channel.lang || ''
    const site_id = channel.site_id || ''
    output += `    <channel lang="${lang}" xmltv_id="${escapeString(
      xmltv_id
    )}" site_id="${site_id}"${logo}>${escapeString(channel.name)}</channel>\r\n`
  })

  output += `  </channels>\r\n</site>\r\n`

  return output
}

function escapeString(string, defaultValue = '') {
  if (!string) return defaultValue

  const regex = new RegExp(
    '((?:[\0-\x08\x0B\f\x0E-\x1F\uFFFD\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]))|([\\x7F-\\x84]|[\\x86-\\x9F]|[\\uFDD0-\\uFDEF]|(?:\\uD83F[\\uDFFE\\uDFFF])|(?:\\uD87F[\\uDF' +
      'FE\\uDFFF])|(?:\\uD8BF[\\uDFFE\\uDFFF])|(?:\\uD8FF[\\uDFFE\\uDFFF])|(?:\\uD93F[\\uDFFE\\uD' +
      'FFF])|(?:\\uD97F[\\uDFFE\\uDFFF])|(?:\\uD9BF[\\uDFFE\\uDFFF])|(?:\\uD9FF[\\uDFFE\\uDFFF])' +
      '|(?:\\uDA3F[\\uDFFE\\uDFFF])|(?:\\uDA7F[\\uDFFE\\uDFFF])|(?:\\uDABF[\\uDFFE\\uDFFF])|(?:\\' +
      'uDAFF[\\uDFFE\\uDFFF])|(?:\\uDB3F[\\uDFFE\\uDFFF])|(?:\\uDB7F[\\uDFFE\\uDFFF])|(?:\\uDBBF' +
      '[\\uDFFE\\uDFFF])|(?:\\uDBFF[\\uDFFE\\uDFFF])(?:[\\0-\\t\\x0B\\f\\x0E-\\u2027\\u202A-\\uD7FF\\' +
      'uE000-\\uFFFF]|[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]|[\\uD800-\\uDBFF](?![\\uDC00-\\uDFFF])|' +
      '(?:[^\\uD800-\\uDBFF]|^)[\\uDC00-\\uDFFF]))',
    'g'
  )

  string = String(string || '').replace(regex, '')

  return string
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&apos;')
    .replace(/\n|\r/g, ' ')
    .replace(/ +/g, ' ')
    .trim()
}

module.exports = xml
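escapeString above first strips characters that are invalid in XML, then entity-encodes the five reserved characters and collapses whitespace; a worked example:

// illustrative:
escapeString(`Tom & Jerry's "Show" <HD>`)
// => 'Tom &amp; Jerry&apos;s &quot;Show&quot; &lt;HD&gt;'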
scripts/core/zip.js (new file, +13)
@@ -0,0 +1,13 @@
const { gzip, ungzip } = require('node-gzip')

const zip = {}

zip.compress = async function (string) {
  return gzip(string)
}

zip.decompress = async function (string) {
  return ungzip(string)
}

module.exports = zip