Add gzip functionality to import/export scripts

Cadence Ember 2020-07-13 01:46:06 +12:00
parent 82ee6e6d64
commit d76c4fae25
GPG Key ID: 128B99B1B74A6412
3 changed files with 36 additions and 8 deletions

.gitignore

@@ -8,7 +8,7 @@ node_modules
 # Database stuff
 db/**/*.db*
-users_export.json
+users_export.json*
 *.log
 *.csv

Export script

@@ -1,11 +1,23 @@
 const fs = require("fs").promises
+const {gzip} = require("zlib")
+const {promisify: p} = require("util")
 const pj = require("path").join
 const db = require("../src/lib/db")
+const targetDir = process.argv.slice(2).includes("--publish") ? "../src/site/html" : ".."
+const shouldGzip = process.argv.slice(2).includes("--gzip")
+const filename = "users_export.json" + (shouldGzip ? ".gz" : "")
+const target = pj(__dirname, targetDir, filename)
 ;(async () => {
 	const users = db.prepare("SELECT * FROM Users").all()
-	const targetDir = process.argv.slice(2).includes("--publish") ? "../src/site/html" : ".."
-	const target = pj(__dirname, targetDir, "users_export.json")
-	fs.writeFile(target, JSON.stringify(users), {encoding: "utf8"})
+	let data = Buffer.from(JSON.stringify(users), "utf8")
+	if (shouldGzip) {
+		data = await p(gzip)(data)
+	}
+	await fs.writeFile(target, data)
 	console.log(`Users exported to ${target}`)
 })()
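
For reference, the new compression path boils down to promisifying zlib.gzip and writing the resulting buffer. A minimal standalone sketch of that pattern follows; the payload and the file name example_export.json.gz are illustrative only, not part of the script:

const fs = require("fs").promises
const {gzip} = require("zlib")
const {promisify} = require("util")

;(async () => {
	// Serialise to a Buffer first so the same value can be written raw or gzipped
	const payload = Buffer.from(JSON.stringify([{username: "example"}]), "utf8")
	// zlib.gzip is callback-based; promisify() makes it awaitable and it resolves to a Buffer
	const compressed = await promisify(gzip)(payload)
	await fs.writeFile("example_export.json.gz", compressed)
	console.log(`Wrote ${compressed.length} compressed bytes`)
})()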

Import script

@@ -1,10 +1,13 @@
-const fs = require("fs").promises
+const fs = require("fs")
+const {createGunzip} = require("zlib")
 const pj = require("path").join
 const db = require("../src/lib/db")
 const {request} = require("../src/lib/utils/request")
 ;(async () => {
 	const target = process.argv[2]
+	const isGzip = target.endsWith(".gz")
 	if (!target) {
 		console.log("Provide the file or URL to import from on the command line.")
 		process.exit(1)
@@ -19,11 +22,24 @@ const {request} = require("../src/lib/utils/request")
 			const length = Number(Array.isArray(lengthContainer) ? lengthContainer[0] : lengthContainer)
 			console.log(`${Math.floor(length/1000)} kB will be downloaded`)
 		}
-		var usersString = await ref.text()
+		var usersStream = await ref.stream()
 	} else {
-		var usersString = await fs.readFile(target, {encoding: "utf8"})
+		/** @type {any} */
+		var usersStream = await fs.createReadStream(target)
 	}
+	if (isGzip) {
+		usersStream = usersStream.pipe(createGunzip())
+	}
+	// Read out the stream into a buffer
+	process.stdout.write("Reading data... ")
+	const buffers = []
+	usersStream.on("data", chunk => buffers.push(chunk))
+	await new Promise(resolve => usersStream.once("end", resolve))
+	const usersString = Buffer.concat(buffers).toString("utf8")
+	process.stdout.write("done.\n")
 	/** @type {{username: string, user_id: string, created: number, updated: number, updated_version: number, biography: string, post_count: number, following_count: number, followed_by_count: number, external_url: string, full_name: string, is_private: number, is_verified: number, profile_pic_url: string}[]} */
 	const incomingUsers = JSON.parse(usersString)
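
The decompress-and-buffer pattern added above can be read in isolation as the following sketch for a local file path; readMaybeGzipped and example_export.json.gz are illustrative names, not part of the script:

const fs = require("fs")
const {createGunzip} = require("zlib")

async function readMaybeGzipped(path) {
	let stream = fs.createReadStream(path)
	if (path.endsWith(".gz")) {
		// Pipe through a gunzip transform so the consumer only ever sees plain JSON bytes
		stream = stream.pipe(createGunzip())
	}
	// Collect the chunks and stitch them back into one UTF-8 string
	const buffers = []
	stream.on("data", chunk => buffers.push(chunk))
	await new Promise((resolve, reject) => {
		stream.once("end", resolve)
		stream.once("error", reject)
	})
	return Buffer.concat(buffers).toString("utf8")
}

readMaybeGzipped("example_export.json.gz").then(text => {
	console.log(`Read ${text.length} characters of JSON`)
})
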
@@ -49,7 +65,7 @@ const {request} = require("../src/lib/utils/request")
 	for (const user of incomingUsers) {
 		if (existing.has(user.user_id)) {
 			const existingRow = existing.get(user.user_id)
-			if (existingRow.updated_version <= user.updated_version && existingRow.updated <= user.updated) {
+			if (existingRow.updated_version <= user.updated_version && existingRow.updated < user.updated) {
 				preparedReplace.run(user)
 				overwrittenCount++
 			} else {
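
The effect of the changed comparison is easiest to see in isolation. A hedged sketch of the guard as it reads after this commit; shouldOverwrite and the sample rows are invented for illustration:

function shouldOverwrite(existingRow, user) {
	// Overwrite only if the incoming row is at least as new by version
	// and strictly newer by timestamp, so equal timestamps are skipped
	return existingRow.updated_version <= user.updated_version
		&& existingRow.updated < user.updated
}

console.log(shouldOverwrite({updated_version: 2, updated: 100}, {updated_version: 2, updated: 100})) // false
console.log(shouldOverwrite({updated_version: 2, updated: 100}, {updated_version: 2, updated: 101})) // true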