
first

pull/1/head
david 3 years ago
commit f979160335
  1. .gitignore (+4)
  2. apiHelper.js (+82)
  3. axios.js (+26)
  4. backupHelper.js (+15)
  5. backups/.keep (+0)
  6. index.js (+39)
  7. package-lock.json (+2533)
  8. package.json (+21)
  9. validation.js (+18)

.gitignore (+4)

@@ -0,0 +1,4 @@
.env
node_modules
backups/*
!backups/.keep
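
For context, the ignored .env file holds the two values that axios.js reads and index.js validates. A minimal sketch with placeholder values:

# Placeholders only: point BASE_URL at your Mealie instance's API and TOKEN at an API token.
BASE_URL=https://mealie.example.com/api
TOKEN=replace-with-your-api-token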

apiHelper.js (+82)

@@ -0,0 +1,82 @@
import { axiosWrapper } from './axios.js';
import { exportDatabaseValidation } from './validation.js';
import fs from 'fs';
import path from 'path';

const getBackups = async () => {
  try {
    const { data } = await axiosWrapper({
      method: 'get',
      url: '/backups/available',
    });
    return data;
  } catch (error) {
    console.error(error);
    return false;
  }
};

const exportDatabase = async (tag) => {
  try {
    const { value: validValue, error } = exportDatabaseValidation({ tag });
    if (error) throw error?.details[0]?.message;
    const { data } = await axiosWrapper({
      method: 'post',
      url: '/backups/export/database',
      payload: {
        tag: validValue.tag, // 'Julyyy 24th 2021'
        options: {
          recipes: true,
          settings: true,
          pages: true,
          themes: true,
          groups: true,
          users: true,
          notifications: true,
        },
        templates: ['recipes.md'],
      },
    });
    return data;
  } catch (error) {
    console.error(error);
    return false;
  }
};

const getFileToken = async (filename) => {
  try {
    const { data } = await axiosWrapper({
      method: 'get',
      url: `/backups/${filename}/download`,
    });
    return data;
  } catch (error) {
    console.error(error);
    return false;
  }
};

const downloadBackup = async (fileToken, fileName) => {
  try {
    const target_path = path.resolve(`backups/${fileName}`);
    const writer = fs.createWriteStream(target_path, 'binary');
    const streamResponse = await axiosWrapper({
      method: 'get',
      url: `/utils/download?token=${fileToken}`,
      responseType: 'stream',
    });
    streamResponse.data.pipe(writer);
    writer.on('finish', () => console.log(`Downloaded: ${fileName}`));
    writer.on('error', () =>
      console.error(`[ERROR] while downloading ${fileName}`)
    );
    return true;
  } catch (error) {
    console.error(error);
    return false;
  }
};

export { getBackups, exportDatabase, getFileToken, downloadBackup };
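
Taken together, the intended flow (mirrored in index.js below) is: trigger a server-side export, exchange the backup's filename for a download token, then stream the archive to disk. A minimal sketch; the tag and filename are placeholder values:

import { exportDatabase, getFileToken, downloadBackup } from './apiHelper.js';

// Placeholders: 'mealieDb' is the export tag, 'backup.zip' stands in for
// the real backup filename returned by the API.
const exported = await exportDatabase('mealieDb');
if (exported) {
  const { fileToken } = await getFileToken('backup.zip');
  await downloadBackup(fileToken, 'backup.zip'); // streamed into backups/
}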

axios.js (+26)

@@ -0,0 +1,26 @@
import axios from 'axios';
import dotenv from 'dotenv';
dotenv.config();

const { BASE_URL, TOKEN } = process.env;

const config = {
  baseURL: BASE_URL,
  headers: { authorization: `Bearer ${TOKEN}` },
};

const axiosWrapper = async ({ method, url, payload, responseType }) => {
  try {
    return await axios({
      method,
      url,
      ...config,
      data: { ...payload },
      responseType,
    });
  } catch (error) {
    console.error(error);
    return null;
  }
};

export { axiosWrapper };

backupHelper.js (+15)

@@ -0,0 +1,15 @@
import dayjs from 'dayjs';

const backupTimeDiff = (lastDate) => {
  try {
    const now = dayjs();
    const last = dayjs(lastDate);
    const difference = now.diff(last, 'seconds');
    return difference;
  } catch (error) {
    console.error(error);
    return null;
  }
};

export { backupTimeDiff };
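
Usage is straightforward; a sketch with a made-up timestamp (in practice index.js passes the date of the most recent backup):

import { backupTimeDiff } from './backupHelper.js';

// Hypothetical timestamp; index.js passes lastBackup.date here.
const seconds = backupTimeDiff('2021-07-24T10:00:00');
console.log(`${seconds} seconds since the last backup`);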

backups/.keep (+0)

index.js (+39)

@@ -0,0 +1,39 @@
import dotenv from 'dotenv';
dotenv.config();
import { environmentVariableValidation } from './validation.js';
import {
  getBackups,
  exportDatabase,
  getFileToken,
  downloadBackup,
} from './apiHelper.js';
import { backupTimeDiff } from './backupHelper.js';

const { value, error } = environmentVariableValidation({
  baseUrl: process.env.BASE_URL,
  access_token: process.env.TOKEN,
});

try {
  if (error) throw error?.details[0]?.message;
} catch (error) {
  console.error(`[ERROR] ${error}`);
  process.exit(0);
}

const Main = async () => {
  await new Promise((resolve) => setTimeout(resolve, 2000));
  const backups = await getBackups();
  const [lastBackup] = backups?.imports;
  const secondsDiff = backupTimeDiff(lastBackup?.date);
  // TODO: actually make this timer like idk 1 day or something
  if (secondsDiff >= 20) {
    const { export_path } = await exportDatabase('mealieDb');
    console.log(`database backed up: ${export_path}`);
    const { fileToken } = await getFileToken(lastBackup?.name);
    await downloadBackup(fileToken, lastBackup?.name);
  }
  await Main(secondsDiff);
};

Main();

package-lock.json (+2533, generated)

File diff suppressed because it is too large.

package.json (+21)

@@ -0,0 +1,21 @@
{
  "name": "mealiebackupservice",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "type": "module",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "dependencies": {
    "axios": "^0.21.4",
    "dayjs": "^1.10.7",
    "dotenv": "^10.0.0",
    "joi": "^17.4.2"
  },
  "devDependencies": {
    "nodemon": "^2.0.13"
  }
}

validation.js (+18)

@@ -0,0 +1,18 @@
import Joi from 'joi';

const environmentVariableValidation = (data) => {
  const schema = Joi.object({
    baseUrl: Joi.string().uri().required(),
    access_token: Joi.string().required(),
  });
  return schema.validate(data);
};

const exportDatabaseValidation = (tag) => {
  const schema = Joi.object({
    tag: Joi.string().required().trim(),
  });
  return schema.validate(tag);
};

export { environmentVariableValidation, exportDatabaseValidation };
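
For reference, a sketch of how a failed check surfaces the message that index.js and apiHelper.js throw (the missing access_token is a deliberately invalid, made-up input):

import { environmentVariableValidation } from './validation.js';

// Deliberately invalid: access_token is omitted.
const { error } = environmentVariableValidation({
  baseUrl: 'https://mealie.example.com',
});
if (error) {
  // Joi places the human-readable reason at error.details[0].message,
  // which is exactly what the callers throw and log.
  console.error(error.details[0].message); // -> "access_token" is required
}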