forked from mirror/misskey

Refactor: Extract downloadTextFile function

syuilo 2019-03-11 20:23:29 +09:00
parent 3dd5f313b7
commit a7e60f80bd
No known key found for this signature in database
GPG Key ID: BDC4C49D06AB9D69
2 changed files with 81 additions and 69 deletions


@@ -0,0 +1,79 @@
import * as tmp from 'tmp';
import * as fs from 'fs';
import * as util from 'util';
import chalk from 'chalk';
import * as request from 'request';
import Logger from '../services/logger';
import config from '../config';

const logger = new Logger('download-text-file');

export async function downloadTextFile(url: string): Promise<string> {
	// Create temp file
	const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
		tmp.file((e, path, fd, cleanup) => {
			if (e) return rej(e);
			res([path, cleanup]);
		});
	});

	logger.info(`Temp file is ${path}`);

	// write content at URL to temp file
	await new Promise((res, rej) => {
		logger.info(`Downloading ${chalk.cyan(url)} ...`);

		const writable = fs.createWriteStream(path);

		writable.on('finish', () => {
			logger.succ(`Download finished: ${chalk.cyan(url)}`);
			res();
		});

		writable.on('error', error => {
			logger.error(`Download failed: ${chalk.cyan(url)}: ${error}`, {
				url: url,
				e: error
			});
			rej(error);
		});

		const requestUrl = new URL(url).pathname.match(/[^\u0021-\u00ff]/) ? encodeURI(url) : url;

		const req = request({
			url: requestUrl,
			proxy: config.proxy,
			timeout: 10 * 1000,
			headers: {
				'User-Agent': config.userAgent
			}
		});

		req.pipe(writable);

		req.on('response', response => {
			if (response.statusCode !== 200) {
				logger.error(`Got ${response.statusCode} (${url})`);
				writable.close();
				rej(response.statusCode);
			}
		});

		req.on('error', error => {
			logger.error(`Failed to start download: ${chalk.cyan(url)}: ${error}`, {
				url: url,
				e: error
			});
			writable.close();
			rej(error);
		});
	});

	logger.succ(`Downloaded to: ${path}`);

	const text = await util.promisify(fs.readFile)(path, 'utf8');

	cleanup();

	return text;
}
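
For reference, a minimal sketch of how a caller consumes the extracted helper, mirroring the call made in the user-list import processor in the second file below. The function name importFromUrl is an illustrative assumption, not part of this commit; the import path is the one used by the processor.

import { downloadTextFile } from '../../../misc/download-text-file'; // path as written in the processor below

// Hypothetical caller: fetch a remote CSV and iterate over its lines
async function importFromUrl(url: string): Promise<void> {
	// downloadTextFile manages the temp file, the HTTP request and cleanup,
	// and resolves with the downloaded content as a UTF-8 string
	const csv = await downloadTextFile(url);

	for (const line of csv.trim().split('\n')) {
		const listName = line.split(',')[0].trim();
		// ... process each record ...
	}
}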


@@ -1,20 +1,16 @@
import * as Bull from 'bull';
-import * as tmp from 'tmp';
-import * as fs from 'fs';
-import * as util from 'util';
import * as mongo from 'mongodb';
-import * as request from 'request';
import { queueLogger } from '../../logger';
import User from '../../../models/user';
import config from '../../../config';
import UserList from '../../../models/user-list';
import DriveFile from '../../../models/drive-file';
-import chalk from 'chalk';
import { getOriginalUrl } from '../../../misc/get-drive-file-url';
import parseAcct from '../../../misc/acct/parse';
import resolveUser from '../../../remote/resolve-user';
import { pushUserToUserList } from '../../../services/user-list/push';
+import { downloadTextFile } from '../../../misc/download-text-file';

const logger = queueLogger.createSubLogger('import-user-lists');

@@ -31,69 +27,7 @@ export async function importUserLists(job: Bull.Job, done: any): Promise<void> {
	const url = getOriginalUrl(file);

-	// Create temp file
-	const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
-		tmp.file((e, path, fd, cleanup) => {
-			if (e) return rej(e);
-			res([path, cleanup]);
-		});
-	});
-
-	logger.info(`Temp file is ${path}`);
-
-	// write content at URL to temp file
-	await new Promise((res, rej) => {
-		logger.info(`Downloading ${chalk.cyan(url)} ...`);
-
-		const writable = fs.createWriteStream(path);
-
-		writable.on('finish', () => {
-			logger.succ(`Download finished: ${chalk.cyan(url)}`);
-			res();
-		});
-
-		writable.on('error', error => {
-			logger.error(`Download failed: ${chalk.cyan(url)}: ${error}`, {
-				url: url,
-				e: error
-			});
-			rej(error);
-		});
-
-		const requestUrl = new URL(url).pathname.match(/[^\u0021-\u00ff]/) ? encodeURI(url) : url;
-
-		const req = request({
-			url: requestUrl,
-			proxy: config.proxy,
-			timeout: 10 * 1000,
-			headers: {
-				'User-Agent': config.userAgent
-			}
-		});
-
-		req.pipe(writable);
-
-		req.on('response', response => {
-			if (response.statusCode !== 200) {
-				logger.error(`Got ${response.statusCode} (${url})`);
-				writable.close();
-				rej(response.statusCode);
-			}
-		});
-
-		req.on('error', error => {
-			logger.error(`Failed to start download: ${chalk.cyan(url)}: ${error}`, {
-				url: url,
-				e: error
-			});
-			writable.close();
-			rej(error);
-		});
-	});
-
-	logger.succ(`Downloaded to: ${path}`);
-
-	const csv = await util.promisify(fs.readFile)(path, 'utf8');
+	const csv = await downloadTextFile(url);

	for (const line of csv.trim().split('\n')) {
		const listName = line.split(',')[0].trim();

@@ -132,6 +66,5 @@ export async function importUserLists(job: Bull.Job, done: any): Promise<void> {
	}

	logger.succ('Imported');
-	cleanup();
	done();
}