那么可不可以优化一下呢?写成一行时要注意:因为使用了逗号,可能被解析为函数的参数分隔符,导致 yes 不被调用,所以要加一对括号括起来。最外层的其实也可以写成一行,但真把外层也压成一行,那就过分了。

  // Build a "deferred": a promise plus its resolve/reject functions exposed
  // to the caller, so callback-style APIs can settle it from outside.
  // Returns [resolveFN, rejectFN, promise].
  function ready() {
    let resolveFN, rejectFN
    let promise = new Promise(
      // Parenthesized so the comma-free destructuring assignment is the
      // executor's single expression (see the note above the listing).
      (resolve, reject) => ([resolveFN, rejectFN] = [resolve, reject])
    )
    return [resolveFN, rejectFN, promise]
  }
  // Save `data` to `path` via jsonfile; resolves when the write completes,
  // rejects with the underlying error on failure.
  const saveFile = (path, data) => {
    const [yes, no, wait] = ready()
    // Settle exactly once: the original `(e && no(e), yes())` also invoked
    // yes() after no(e) and only worked because a settled promise ignores
    // later resolve calls.
    jsonfile.writeFile(path, data, e => (e ? no(e) : yes()))
    return wait
  }
  // Resolve after `time` milliseconds.
  // BUG FIX: the extracted listing returned `wait` without ever creating it
  // (ReferenceError at call time); restore the ready()/setTimeout body that
  // the article shows in its second listing.
  const sleep = time => {
    const [yes, no, wait] = ready()
    setTimeout(yes, time)
    return wait
  }
  // Promise wrapper around fs.stat: resolves with the stats object when the
  // path exists, rejects with the stat error otherwise.
  const exits = path => {
    const [yes, no, wait] = ready()
    // Settle exactly once (reject OR resolve), instead of the original
    // comma-operator form that called yes(stats) even after no(e).
    fs.stat(path, (e, stats) => (e ? no(e) : yes(stats)))
    return wait
  }
  22. const ensureSavaPath = path => exits(path).catch(() => mkdir(path))

那么继续抽象的话,其实对于这种 node 统一风格的回调,非常容易转换成 promise,使用 promisify 即可,类似于 Rxjs 的 bindNodeCallback。

  1. const { promisify } = require('util')
  // Build a "deferred": a promise plus its externally-exposed resolve/reject
  // functions. Returns [resolveFN, rejectFN, promise].
  function ready() {
    let resolveFN, rejectFN
    let promise = new Promise(
      (resolve, reject) => ([resolveFN, rejectFN] = [resolve, reject])
    )
    // NOTE(review): the extracted listing dropped the return and the closing
    // brace — without them ready() yields undefined and sleep() below breaks.
    return [resolveFN, rejectFN, promise]
  }

  // Resolve after `time` milliseconds.
  const sleep = time => {
    const [yes, no, wait] = ready()
    setTimeout(yes, time)
    return wait
  }
  // Save a file: jsonfile.writeFile follows the node error-first callback
  // convention, so promisify adapts it directly.
  const saveFile = promisify(jsonfile.writeFile)
  // Resolves with fs.Stats when the path exists, rejects otherwise.
  const exits = promisify(fs.stat)
  // Ensure the save path exists, creating it when the stat rejects.
  const ensureSavaPath = path => exits(path).catch(() => mkdir(path))

核心逻辑

当下载内容的时候,同样使用 crawl 提供的爬取内容的规则。

  1. async function downloadText(chapter, crawl, index, opts) {
  2. const { path, charset } = opts
  3. const selector = await buildSelector(chapter.url, charset)
  4. const text = crawl.text(selector)
  5. }