Commit 20f0acd

🔒 Add Private backup feature
1 parent 0113b92 commit 20f0acd

File tree: 2 files changed, +15 -7 lines


app.js

Lines changed: 3 additions & 1 deletion
@@ -4,9 +4,10 @@ const Crawler = require('./crawler');
 
 const program = new Command();
 
-program.version('1.0.0');
+program.version('1.0.1');
 
 program.option('-u, --username <username>', 'velog 유저이름');
 program.option('-d, --delay <ms>', '요청 딜레이 시간')
+program.option('-c, --cert <access_token>', 'velog 유저 access_token')
 
 program.parse(process.argv);
 
@@ -16,6 +17,7 @@ program.parse(process.argv);
 
 const crawler = new Crawler(program.username, {
   delay: program.delay || 0,
+  cert: program.cert,
 });
 
 console.log('📙 백업을 시작합니다 / velog-backup');
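
With this change, a backup that includes private posts can presumably be run by passing the value of the velog access_token cookie through the new option, e.g. node app.js -u <username> -c <access_token> (the -u and -c flag names come from the options above; how to obtain the token from a logged-in velog session is not covered by this commit).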

crawler/index.js

Lines changed: 12 additions & 6 deletions
@@ -5,13 +5,19 @@ const { join } = require('path');
 const { PostsQuery, PostQuery } = require('./query');
 
 class Crawler {
-  constructor(username, { delay }) {
+  constructor(username, { delay, cert }) {
     this.username = username;
 
     // options
     this.delay = delay;
+    this.cert = cert;
 
     this.__grahpqlURL = 'https://v2.velog.io/graphql';
+    this.__api = axios.create({
+      headers:{
+        Cookie: cert ? `access_token=${cert};` : null,
+      },
+    });
   }
 
   async parse() {
@@ -34,7 +40,7 @@ class Crawler {
     let posts = [];
 
     try {
-      await axios.get(url);
+      await this.__api.get(url);
     } catch (e) {
       if (e.response.status === 404) {
         console.error(`⚠️ 해당 유저를 찾을 수 없어요 \n username = ${this.username}`);
@@ -47,9 +53,9 @@
     while (true) {
       try {
         if (response && response.data.data.posts.length >= 20) {
-          response = await axios.post(this.__grahpqlURL, PostsQuery(this.username, posts[posts.length - 1].id));
+          response = await this.__api.post(this.__grahpqlURL, PostsQuery(this.username, posts[posts.length - 1].id));
         } else {
-          response = await axios.post(this.__grahpqlURL, PostsQuery(this.username));
+          response = await this.__api.post(this.__grahpqlURL, PostsQuery(this.username));
         }
       } catch(e) {
         console.error(`⚠️ 벨로그에서 글 목록을 가져오는데 실패했습니다. \n error = ${e}`);
@@ -69,7 +75,7 @@ class Crawler {
     let response;
 
     try {
-      response = await axios.post(this.__grahpqlURL, PostQuery(this.username, url_slug));
+      response = await this.__api.post(this.__grahpqlURL, PostQuery(this.username, url_slug));
     } catch (e) {
       console.error(`⚠️ 벨로그에서 글을 가져오는데 실패했습니다. \n error = ${e} url = ${url_slug}`);
       process.exit(1);
@@ -107,7 +113,7 @@ class Crawler {
     const filename = url.replace(/\/\s*$/,'').split('/').slice(-2).join('-').trim();
     const path = join('backup', 'images', decodeURI(filename));
 
-    axios({
+    this.__api({
       method: 'get',
       url: encodeURI(decodeURI(url)),
       responseType: 'stream',
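
For context, a minimal sketch (not part of this commit) of the shared-client pattern that crawler/index.js now uses: one axios instance created with the velog access_token cookie, so that authenticated requests can include private posts. PostsQuery, the GraphQL endpoint, and the response shape are taken from the diff above; the VELOG_ACCESS_TOKEN environment variable, the require path, and the username are illustrative placeholders.

// Minimal sketch, assuming axios is installed and crawler/query.js exports PostsQuery.
const axios = require('axios');
const { PostsQuery } = require('./crawler/query'); // hypothetical path from the repo root

// Hypothetical: the token is supplied via an environment variable instead of the -c flag.
const cert = process.env.VELOG_ACCESS_TOKEN;

// Same idea as this.__api in crawler/index.js: every request carries the
// access_token cookie when a token was provided, otherwise no cookie is set.
const api = axios.create({
  headers: {
    Cookie: cert ? `access_token=${cert};` : null,
  },
});

// Fetch the first page of posts for a placeholder username, mirroring the crawler's list fetch.
api.post('https://v2.velog.io/graphql', PostsQuery('some-username'))
  .then((res) => console.log(`fetched ${res.data.data.posts.length} posts`))
  .catch((err) => console.error(err.message));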
