feat: migrate-to-mongo 0.4.6

Gabriel Pastori
2023-12-13 13:05:48 -03:00
parent a0cd4d370c
commit be7fea49a1
10 changed files with 2497 additions and 3388 deletions


@@ -0,0 +1,44 @@
const FOLDER_PATH = [{
  path: "/store/chats",
  key: "chats"
}]
const fs = require("fs")
const path = require("path")

// Migrates the JSON chat store of a single instance into MongoDB:
// every file found under FOLDER_PATH becomes one upserted document.
module.exports = async (instanceName, options, progressBars, conn) => {
  let files = []
  FOLDER_PATH.forEach(folder => {
    files = files.concat(getFiles(instanceName, folder))
  })
  const progress = progressBars.create(files.length, 0)
  progress.update({ process: 'Chats' })
  for (const file of files) {
    const collectionName = file.key
    const collection = conn.collection(collectionName)
    const data = JSON.parse(fs.readFileSync(file.path, 'utf8'))
    // Use the file name (without extension) as the document _id; path.basename
    // handles both "/" and "\" separators, unlike splitting on "\\" alone.
    data._id = path.basename(file.path).split('.')[0]
    await collection.findOneAndUpdate({ _id: data._id }, { $set: data }, { upsert: true })
    progress.increment()
  }
}

// Recursively collects the files that belong to the given instance,
// matching either the file name or the folder path against instanceName.
function getFiles(instanceName, opts) {
  let files = []
  const folder = opts.path || opts
  const folderPath = path.join(process.cwd(), folder)
  fs.readdirSync(folderPath).forEach(file => {
    const filePath = path.join(folderPath, file)
    if (fs.statSync(filePath).isDirectory()) {
      files = files.concat(getFiles(instanceName, { ...opts, path: `${folder}/${file}` }))
    } else if (file.includes(instanceName) || folder.includes(instanceName)) {
      files.push({ ...opts, path: filePath })
    }
  })
  return files
}
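
Each migration module in this commit shares the same signature, (instanceName, options, progressBars, conn): the progressBars.create/update/increment calls line up with the cli-progress MultiBar API, and conn behaves like a Db handle from the official mongodb driver. Assuming those two libraries (the connection string, database name, and the "./chats" require path below are purely illustrative, not part of this commit), a minimal driver for one of these modules could look like this sketch:

// Minimal sketch of a driver for one migration module. The mongodb/cli-progress
// usage is inferred from the call sites above; the URI, database name and the
// "./chats" path are placeholders.
const { MongoClient } = require("mongodb")
const cliProgress = require("cli-progress")
const migrateChats = require("./chats") // hypothetical path to the module above

async function run() {
  const client = new MongoClient("mongodb://localhost:27017") // placeholder URI
  await client.connect()
  const conn = client.db("evolution") // placeholder database name
  const progressBars = new cliProgress.MultiBar({
    format: "{process} [{bar}] {value}/{total}"
  })
  try {
    await migrateChats("my-instance", {}, progressBars, conn)
  } finally {
    progressBars.stop()
    await client.close()
  }
}

run().catch(console.error)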


@@ -0,0 +1,44 @@
const FOLDER_PATH = [{
  path: "/store/contacts",
  key: "contacts"
}]
const fs = require("fs")
const path = require("path")

// Same pattern as the chats migration: each contact JSON file is upserted
// into the "contacts" collection, keyed by its file name.
module.exports = async (instanceName, options, progressBars, conn) => {
  let files = []
  FOLDER_PATH.forEach(folder => {
    files = files.concat(getFiles(instanceName, folder))
  })
  const progress = progressBars.create(files.length, 0)
  progress.update({ process: 'Contacts' })
  for (const file of files) {
    const collectionName = file.key
    const collection = conn.collection(collectionName)
    const data = JSON.parse(fs.readFileSync(file.path, 'utf8'))
    data._id = path.basename(file.path).split('.')[0] // portable: works with "/" and "\" paths
    await collection.findOneAndUpdate({ _id: data._id }, { $set: data }, { upsert: true })
    progress.increment()
  }
}

function getFiles(instanceName, opts) {
  let files = []
  const folder = opts.path || opts
  const folderPath = path.join(process.cwd(), folder)
  fs.readdirSync(folderPath).forEach(file => {
    const filePath = path.join(folderPath, file)
    if (fs.statSync(filePath).isDirectory()) {
      files = files.concat(getFiles(instanceName, { ...opts, path: `${folder}/${file}` }))
    } else if (file.includes(instanceName) || folder.includes(instanceName)) {
      files.push({ ...opts, path: filePath })
    }
  })
  return files
}


@@ -0,0 +1,57 @@
const FOLDER_PATH = [
  {
    path: "/store/auth/apikey",
    key: "authentication"
  },
  {
    path: "/instances",
    secondaryConnection: true
  },
  { path: "/store/chamaai", key: "chamaai" },
  { path: "/store/chatwoot", key: "chatwoot" },
  { path: "/store/proxy", key: "proxy" },
  { path: "/store/rabbitmq", key: "rabbitmq" },
  { path: "/store/settings", key: "settings" },
  { path: "/store/typebot", key: "typebot" },
  { path: "/store/webhook", key: "webhook" },
  { path: "/store/websocket", key: "websocket" },
]
const fs = require("fs")
const path = require("path")

// Migrates instance-level data. Folders flagged with secondaryConnection are
// written through connInstance into a collection named after the instance;
// every other folder goes to the primary connection under its key.
module.exports = async (instanceName, options, progressBars, conn, connInstance) => {
  let files = []
  FOLDER_PATH.forEach(folder => {
    files = files.concat(getFiles(instanceName, folder))
  })
  const progress = progressBars.create(files.length, 0)
  progress.update({ process: 'Instance' })
  for (const file of files) {
    const collectionName = file.key || instanceName
    const collection = (!file.secondaryConnection ? conn : connInstance).collection(collectionName)
    const data = JSON.parse(fs.readFileSync(file.path, 'utf8'))
    data._id = path.basename(file.path).split('.')[0] // file name (minus extension) as _id
    await collection.findOneAndUpdate({ _id: data._id }, { $set: data }, { upsert: true })
    progress.increment()
  }
}

// Recursively collects the files belonging to the given instance.
function getFiles(instanceName, opts) {
  let files = []
  const folder = opts.path || opts
  const folderPath = path.join(process.cwd(), folder)
  fs.readdirSync(folderPath).forEach(file => {
    const filePath = path.join(folderPath, file)
    if (fs.statSync(filePath).isDirectory()) {
      files = files.concat(getFiles(instanceName, { ...opts, path: `${folder}/${file}` }))
    } else if (file.includes(instanceName) || folder.includes(instanceName)) {
      files.push({ ...opts, path: filePath })
    }
  })
  return files
}
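
This is the one module that takes a fifth argument: anything flagged secondaryConnection: true (here the /instances folder) is written through connInstance, while the remaining folders land in the primary conn under their keys. A sketch of how the two handles might be obtained from a single client is below, assuming the mongodb driver; the database names are placeholders, not taken from this commit:

// Sketch only: how conn and connInstance could be derived from one client.
// Assumes the official mongodb driver; database names are placeholders.
const { MongoClient } = require("mongodb")

async function openConnections(uri) {
  const client = new MongoClient(uri)
  await client.connect()
  return {
    client,
    conn: client.db("evolution"),                  // shared store collections (chats, contacts, ...)
    connInstance: client.db("evolution-instances") // one collection per instance (auth/session data)
  }
}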


@@ -0,0 +1,44 @@
const FOLDER_PATH = [{
  path: "/store/message-up",
  key: "messageUpdate"
}]
const fs = require("fs")
const path = require("path")

// Upserts message-update JSON files into the "messageUpdate" collection,
// following the same pattern as the chats and contacts migrations.
module.exports = async (instanceName, options, progressBars, conn) => {
  let files = []
  FOLDER_PATH.forEach(folder => {
    files = files.concat(getFiles(instanceName, folder))
  })
  const progress = progressBars.create(files.length, 0)
  progress.update({ process: 'Message Update' })
  for (const file of files) {
    const collectionName = file.key
    const collection = conn.collection(collectionName)
    const data = JSON.parse(fs.readFileSync(file.path, 'utf8'))
    data._id = path.basename(file.path).split('.')[0] // portable: works with "/" and "\" paths
    await collection.findOneAndUpdate({ _id: data._id }, { $set: data }, { upsert: true })
    progress.increment()
  }
}

function getFiles(instanceName, opts) {
  let files = []
  const folder = opts.path || opts
  const folderPath = path.join(process.cwd(), folder)
  fs.readdirSync(folderPath).forEach(file => {
    const filePath = path.join(folderPath, file)
    if (fs.statSync(filePath).isDirectory()) {
      files = files.concat(getFiles(instanceName, { ...opts, path: `${folder}/${file}` }))
    } else if (file.includes(instanceName) || folder.includes(instanceName)) {
      files.push({ ...opts, path: filePath })
    }
  })
  return files
}


@@ -0,0 +1,44 @@
const FOLDER_PATH = [{
  path: "/store/messages",
  key: "messages"
}]
const fs = require("fs")
const path = require("path")

// Upserts message JSON files into the "messages" collection,
// following the same pattern as the other store migrations.
module.exports = async (instanceName, options, progressBars, conn) => {
  let files = []
  FOLDER_PATH.forEach(folder => {
    files = files.concat(getFiles(instanceName, folder))
  })
  const progress = progressBars.create(files.length, 0)
  progress.update({ process: 'Messages' })
  for (const file of files) {
    const collectionName = file.key
    const collection = conn.collection(collectionName)
    const data = JSON.parse(fs.readFileSync(file.path, 'utf8'))
    data._id = path.basename(file.path).split('.')[0] // portable: works with "/" and "\" paths
    await collection.findOneAndUpdate({ _id: data._id }, { $set: data }, { upsert: true })
    progress.increment()
  }
}

function getFiles(instanceName, opts) {
  let files = []
  const folder = opts.path || opts
  const folderPath = path.join(process.cwd(), folder)
  fs.readdirSync(folderPath).forEach(file => {
    const filePath = path.join(folderPath, file)
    if (fs.statSync(filePath).isDirectory()) {
      files = files.concat(getFiles(instanceName, { ...opts, path: `${folder}/${file}` }))
    } else if (file.includes(instanceName) || folder.includes(instanceName)) {
      files.push({ ...opts, path: filePath })
    }
  })
  return files
}