Mirror of https://github.com/advplyr/audiobookshelf.git
Remove ffbinaries local cache

Commit 6afb8de3dd (parent 0e62ccc7aa)

The downloaded archive is now written to the destination directory, extracted there, and deleted afterwards, instead of being kept in a ~/.ffbinaries-cache directory in the user's home folder.
@@ -8,12 +8,11 @@ const { finished } = require('stream/promises')
 var API_URL = 'https://ffbinaries.com/api/v1'
 
-var LOCAL_CACHE_DIR = path.join(os.homedir() + '/.ffbinaries-cache')
 var RUNTIME_CACHE = {}
 var errorMsgs = {
   connectionIssues: 'Couldn\'t connect to ffbinaries.com API. Check your Internet connection.',
-  parsingVersionData: 'Couldn\'t parse retrieved version data. Try "ffbinaries clearcache".',
-  parsingVersionList: 'Couldn\'t parse the list of available versions. Try "ffbinaries clearcache".',
+  parsingVersionData: 'Couldn\'t parse retrieved version data.',
+  parsingVersionList: 'Couldn\'t parse the list of available versions.',
   notFound: 'Requested data not found.',
   incorrectVersionParam: '"version" parameter must be a string.'
 }
@@ -26,8 +25,6 @@ function ensureDirSync(dir) {
   }
 }
 
-ensureDirSync(LOCAL_CACHE_DIR)
-
 /**
  * Resolves the platform key based on input string
  */
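Only the call that pre-created the cache directory is removed here; the ensureDirSync helper itself stays, since its body lies outside this hunk. For orientation, a minimal sketch of what a helper like this usually does, assuming it wraps fse.mkdirSync and tolerates an already existing directory (the actual body is not shown in this diff):

    const fse = require('fs-extra')

    // Hypothetical body for a helper like ensureDirSync: create the directory
    // if it is missing and ignore the error when it already exists. The real
    // implementation is not part of this diff.
    function ensureDirSync(dir) {
      try {
        fse.mkdirSync(dir)
      } catch (e) {
        if (e.code !== 'EEXIST') throw e
      }
    }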
@@ -195,7 +192,7 @@ async function downloadUrls(components, urls, opts) {
 
 
   async function extractZipToDestination(zipFilename) {
-    const oldpath = path.join(LOCAL_CACHE_DIR, zipFilename)
+    const oldpath = path.join(destinationDir, zipFilename)
     const zip = new StreamZip.async({ file: oldpath })
     const count = await zip.extract(null, destinationDir)
     await zip.close()
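The only change in extractZipToDestination is where the archive is read from: it now sits in destinationDir instead of the removed cache directory. The extraction itself is unchanged; as a self-contained sketch of the same node-stream-zip pattern (both paths here are placeholders):

    const path = require('path')
    const StreamZip = require('node-stream-zip')

    // Extract every entry of an archive into a destination directory, mirroring
    // extractZipToDestination above. Both arguments are placeholder paths.
    async function extractAll(destinationDir, zipFilename) {
      const archivePath = path.join(destinationDir, zipFilename)
      const zip = new StreamZip.async({ file: archivePath })
      const count = await zip.extract(null, destinationDir) // null extracts all entries
      await zip.close()
      return count // number of entries written
    }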
@@ -246,25 +243,11 @@ async function downloadUrls(components, urls, opts) {
       return
     }
 
-    // If there's no binary then check if the zip file is already in cache
-    const zipPath = path.join(LOCAL_CACHE_DIR, zipFilename)
-    if (await fse.pathExists(zipPath)) {
-      results.push({
-        filename: binFilename,
-        path: destinationDir,
-        status: 'File extracted to destination (archive found in cache)',
-        code: 'DONE_FROM_CACHE'
-      })
-      clearInterval(interval)
-      await extractZipToDestination(zipFilename)
-      return
-    }
-
-    // If zip is not cached then download it and store in cache
     if (opts.quiet) clearInterval(interval)
 
-    const cacheFileTempName = zipPath + '.part'
-    const cacheFileFinalName = zipPath
+    const zipPath = path.join(destinationDir, zipFilename)
+    const zipFileTempName = zipPath + '.part'
+    const zipFileFinalName = zipPath
 
     const response = await axios({
       url,
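The branch deleted above used fse.pathExists to spot an archive left behind by an earlier run and extract it without downloading again (the DONE_FROM_CACHE path). Since LOCAL_CACHE_DIR is gone, that shortcut no longer applies; for reference, the check it relied on looked roughly like this sketch of the old behaviour:

    const path = require('path')
    const fse = require('fs-extra')

    // Sketch of the removed cache check: resolve the archive path inside the
    // (now deleted) cache directory and test whether it is already on disk.
    async function hasCachedArchive(cacheDir, zipFilename) {
      const zipPath = path.join(cacheDir, zipFilename)
      return await fse.pathExists(zipPath) // true if a previous download left the zip here
    }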
@@ -273,15 +256,15 @@ async function downloadUrls(components, urls, opts) {
     })
     totalFilesize = response.headers?.['content-length'] || []
 
-    // Write to cacheFileTempName
-    const writer = fse.createWriteStream(cacheFileTempName)
+    const writer = fse.createWriteStream(zipFileTempName)
     response.data.on('data', (chunk) => {
       runningTotal += chunk.length
     })
     response.data.pipe(writer)
     await finished(writer)
-    await fse.rename(cacheFileTempName, cacheFileFinalName)
+    await fse.rename(zipFileTempName, zipFileFinalName)
     await extractZipToDestination(zipFilename)
+    await fse.remove(zipFileFinalName)
 
     results.push({
       filename: binFilename,
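Taken together, the last two hunks replace the cache round trip with a direct download: the archive streams into destinationDir as a .part file, is renamed once the stream finishes, extracted, and then deleted. A condensed sketch of that flow, assuming an axios stream response (responseType: 'stream' is not visible in this excerpt) and the fs-extra helpers used above; url, destinationDir, zipFilename and extractZipToDestination come from the surrounding function:

    const path = require('path')
    const axios = require('axios')
    const fse = require('fs-extra')
    const { finished } = require('stream/promises')

    // Condensed sketch of the new download-and-extract flow after this commit.
    async function fetchAndExtract(url, destinationDir, zipFilename, extractZipToDestination) {
      const zipPath = path.join(destinationDir, zipFilename)
      const zipFileTempName = zipPath + '.part' // partial download target
      const zipFileFinalName = zipPath

      // responseType: 'stream' is an assumption; the diff context does not show it
      const response = await axios({ url, responseType: 'stream' })
      const writer = fse.createWriteStream(zipFileTempName)
      response.data.pipe(writer)
      await finished(writer)

      await fse.rename(zipFileTempName, zipFileFinalName) // promote the finished download
      await extractZipToDestination(zipFilename) // unpack next to the binaries
      await fse.remove(zipFileFinalName) // the archive itself is no longer kept
    }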
@@ -321,10 +304,6 @@ async function downloadBinaries(components, opts = {}) {
   return await downloadUrls(components, urls, opts)
 }
 
-function clearCache() {
-  fse.emptyDirSync(LOCAL_CACHE_DIR)
-}
-
 module.exports = {
   downloadBinaries: downloadBinaries,
   getVersionData: getVersionData,
@@ -332,6 +311,5 @@ module.exports = {
   listPlatforms: listPlatforms,
   detectPlatform: detectPlatform,
   resolvePlatform: resolvePlatform,
-  getBinaryFilename: getBinaryFilename,
-  clearCache: clearCache
+  getBinaryFilename: getBinaryFilename
 }
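With clearCache dropped from the exports, the public surface is the download entry point plus the platform and version helpers. A hedged usage sketch; the component name 'ffmpeg' and the destination option are assumptions for illustration, while quiet is the option visible in the diff above:

    const ffbinaries = require('./ffbinaries') // module path is an assumption

    async function main() {
      const results = await ffbinaries.downloadBinaries(['ffmpeg'], {
        destination: '/tmp/ffmpeg-bin', // assumed option name for the target directory
        quiet: true
      })
      // Each result mirrors the fields pushed in downloadUrls above.
      console.log(results) // e.g. [{ filename, path, status, code }]
    }

    main()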