Browse Source

Merge pull request 'add a logger, tidy and add more env vars, moved some things around' (#2) from develop into main

Reviewed-on: https://git.poyner.page/david/mealieBackupService/pulls/2
main
david 3 years ago
parent
commit
7df5187b63
  1. 132
      apiHelper.js
  2. 31
      axios.js
  3. 21
      backupHelper.js
  4. 20
      config.js
  5. 66
      index.js
  6. 66
      utils.js
  7. 9
      validation.js

132
apiHelper.js

@ -1,7 +1,9 @@
import { axiosWrapper } from './axios.js';
import { exportDatabaseValidation } from './validation.js';
import fs from 'fs';
import path from 'path';
import { axiosWrapper } from './axios.js';
import { exportDatabaseValidation } from './validation.js';
import { logger, validationErrorCheck } from './utils.js';
import { envVars } from './config.js';
const getBackupsRequest = async () => {
try {
@ -9,9 +11,10 @@ const getBackupsRequest = async () => {
method: 'get',
url: '/backups/available',
});
if (!data) throw 'failed to get a list of backups';
return data;
} catch (error) {
console.error(error);
logger({ type: 'error', msg: error });
return false;
}
};
@ -19,35 +22,77 @@ const getBackupsRequest = async () => {
/**
 * Export the mealie database, retrying on intermittent server errors.
 * @param {string} tag - base tag for the export; a timestamp is appended
 * @returns {Promise<boolean>} true when mealie answered 201, false otherwise
 */
const exportDatabaseRequest = async (tag) => {
  try {
    // unique tag so repeated exports never collide on the mealie side
    const timestampedTag = `${tag}_${Date.now()}`;
    const { value: validValue, error } = exportDatabaseValidation({
      tag: timestampedTag,
    });
    validationErrorCheck(error);
    /**
     * mealie seems to get its knickers in a knot randomly:
     * we notice intermittent 500 internal server errors here, and also
     * with insomnia, so presumably unrelated to axios. Work around it by
     * retrying up to envVars.EXPORT_ATTEMPTS times with a 5 second pause.
     * NOTE(review): eventually mealie would only return 500s when
     * exporting and required a restart to fix.
     */
    const runBackup = async (attempt = 0) => {
      try {
        attempt++;
        if (attempt <= envVars.EXPORT_ATTEMPTS) {
          const result = await axiosWrapper({
            method: 'post',
            url: '/backups/export/database',
            payload: {
              tag: validValue.tag,
              options: {
                recipes: true,
                settings: true,
                pages: true,
                themes: true,
                groups: true,
                users: true,
                notifications: true,
              },
              templates: ['recipes.md'],
            },
          });
          if (result?.status === 201) {
            logger({
              type: 'info',
              msg: `db export was successful and saved at ${result?.data?.export_path}`,
            });
            return true;
          }
          logger({
            type: 'error',
            msg: `export failed on attempt ${attempt}, will try again in 5 seconds`,
          });
          await new Promise((resolve) => setTimeout(resolve, 5000));
          // propagate the retry's outcome; without this return a
          // successful retry was discarded and undefined bubbled up,
          // which callers treated as a failed export
          return await runBackup(attempt);
        }
        // all attempts exhausted
        return false;
      } catch (error) {
        logger({
          type: 'error',
          msg: error,
        });
        return false;
      }
    };
    return await runBackup();
  } catch (error) {
    logger({
      type: 'error',
      msg: 'failed to export the db',
    });
    return false;
  }
};
@ -60,7 +105,10 @@ const getFileTokenRequest = async (filename) => {
});
return data;
} catch (error) {
console.error(error);
logger({
type: 'error',
msg: 'failed to get a file token',
});
return false;
}
};
@ -85,13 +133,25 @@ const downloadBackupRequest = async (fileToken, fileName) => {
responseType: 'stream',
});
streamResponse.data.pipe(writer);
writer.on('finish', () => console.log(`Downloaded to ${target_path}`));
writer.on('finish', () =>
logger({
type: 'info',
msg: `Downloaded to ${target_path}`,
})
);
writer.on('error', () =>
console.error(`[ERROR] while dowloading ${fileName}`)
logger({
type: 'error',
msg: `trouble while dowloading ${fileName}`,
})
);
return true;
} catch (error) {
console.error(error);
logger({
type: 'error',
msg: 'failed to download a backup file',
});
return false;
}
};
@ -102,10 +162,16 @@ const backupDeleteRequest = async (filename) => {
method: 'delete',
url: `/backups/${filename}/delete`,
});
console.log(`Deleted ${filename} from mealie`);
logger({
type: 'warn',
msg: `Deleted ${filename} from mealie`,
});
return true;
} catch (error) {
console.error(error);
logger({
type: 'error',
msg: 'failed to delete a backup file',
});
return false;
}
};

31
axios.js

@ -1,33 +1,30 @@
import axios from 'axios';
import { environmentVariableValidation } from './validation.js';
import dotenv from 'dotenv';
dotenv.config();
const { BASE_URL, TOKEN } = process.env;
const { value, error } = environmentVariableValidation({
baseUrl: BASE_URL,
access_token: TOKEN,
});
if (error) throw error?.details[0]?.message;
import { envVars } from './config.js';
import { logger } from './utils.js';
// shared axios request config: mealie base URL plus bearer-token auth,
// both sourced from the validated environment variables
const config = {
  baseURL: envVars.BASE_URL,
  headers: { authorization: `Bearer ${envVars.TOKEN}` },
};
/**
 * Thin wrapper around axios with the shared mealie config applied.
 * @param {object} opts
 * @param {string} opts.method - HTTP verb
 * @param {string} opts.url - path relative to the configured baseURL
 * @param {object} [opts.payload] - request body, spread into `data`
 * @param {string} [opts.responseType='json'] - axios response type
 * @returns {Promise<object>} the axios response, or the thrown error
 *   object on failure (callers inspect `result?.status`)
 */
const axiosWrapper = async ({
  method,
  url,
  payload,
  responseType = 'json',
}) => {
  try {
    const result = await axios({
      method,
      url,
      ...config,
      data: { ...payload },
      responseType,
    });
    return result;
  } catch (error) {
    logger({ type: 'error', msg: 'axios has run into trouble' });
    // return the error rather than rethrowing so callers can retry
    return error;
  }
};

21
backupHelper.js

@ -1,21 +0,0 @@
import dayjs from 'dayjs';
// Seconds elapsed between `lastDate` and now; null when dayjs blows up.
const backupTimeDiff = (lastDate) => {
  try {
    return dayjs().diff(dayjs(lastDate), 'seconds');
  } catch (error) {
    console.error(error);
    return null;
  }
};
// Reduce a list of backup records to just their file names.
// A falsy input yields an empty list so callers can iterate safely.
const backupsToDelete = (backupArray) =>
  backupArray ? backupArray.map((backup) => backup.name) : [];
export { backupTimeDiff, backupsToDelete };

20
config.js

@ -0,0 +1,20 @@
import dotenv from 'dotenv';
dotenv.config();
// Single source of truth for configuration read from the environment.
// BACKUP_INTERVAL and EXPORT_ATTEMPTS fall back to the same defaults the
// Joi schema declares, so code that reads envVars directly never ends up
// comparing against undefined (e.g. `attempt <= envVars.EXPORT_ATTEMPTS`
// was always false when the variable was unset, so no export ever ran).
const {
  BASE_URL,
  TOKEN,
  BACKUP_INTERVAL,
  REMOTE_BACKUPS_MAX,
  EXPORT_ATTEMPTS,
} = process.env;
const envVars = {
  BASE_URL,
  TOKEN,
  BACKUP_INTERVAL: BACKUP_INTERVAL ?? 1440,
  // intentionally left undefined when unset: callers use its truthiness
  // to decide whether remote pruning is enabled at all
  REMOTE_BACKUPS_MAX,
  EXPORT_ATTEMPTS: EXPORT_ATTEMPTS ?? 10,
};
export { envVars };

66
index.js

@ -1,6 +1,7 @@
import dotenv from 'dotenv';
dotenv.config();
import { envVars } from './config.js';
import { environmentVariableValidation } from './validation.js';
import { validationErrorCheck, logger } from './utils.js';
import { backupsToDelete, haveSomeCoffee } from './utils.js';
import {
getBackupsRequest,
exportDatabaseRequest,
@ -8,37 +9,50 @@ import {
downloadBackupRequest,
backupDeleteRequest,
} from './apiHelper.js';
import { backupTimeDiff, backupsToDelete } from './backupHelper.js';
const { value, error } = environmentVariableValidation({
baseUrl: process.env.BASE_URL,
access_token: process.env.TOKEN,
// check the env vars asap n complain if there's errors
const { error } = environmentVariableValidation({
...envVars,
});
try {
if (error) throw error?.details[0]?.message;
} catch (error) {
console.error(`[ERROR] ${error}`);
process.exit(0);
}
validationErrorCheck(error);
/**
* get a list of backups
* find the time difference since the last backup then decide to do stuff
* if there's a big enough difference download the last backup on mealie to local fs
* if there's a max backup var remove all the old remote backup files it finds
* probably also export the current db
*/
const Main = async () => {
await new Promise((resolve) => setTimeout(resolve, 10000));
const backups = await getBackupsRequest();
const [lastBackup, ...rest] = backups?.imports;
const backupsToRemove = backupsToDelete(rest.splice(10));
const secondsDiff = backupTimeDiff(lastBackup?.date);
// TODO: actually make this timer like idk 1 day or something
if (secondsDiff >= 10) {
await exportDatabaseRequest('mealieDb');
const response = await exportDatabaseRequest('mealieDb');
if (response) {
const backups = await getBackupsRequest();
const [lastBackup, ...rest] = backups ? backups?.imports : [];
const backupsToRemove = envVars.REMOTE_BACKUPS_MAX
? await backupsToDelete(rest.splice(envVars.REMOTE_BACKUPS_MAX))
: null;
const { fileToken } = await getFileTokenRequest(lastBackup?.name);
await downloadBackupRequest(fileToken, lastBackup?.name);
for (const file of backupsToRemove) {
await backupDeleteRequest(file);
await new Promise((resolve) => setTimeout(resolve, 300));
}
if (fileToken) await downloadBackupRequest(fileToken, lastBackup?.name);
if (backupsToRemove)
for (const file of backupsToRemove) {
await backupDeleteRequest(file);
await new Promise((resolve) => setTimeout(resolve, 300));
}
await haveSomeCoffee();
await Main();
}
if (!response) {
await haveSomeCoffee();
await Main();
}
await Main();
};
Main();

66
utils.js

@ -0,0 +1,66 @@
import dayjs from 'dayjs';
import { envVars } from './config.js';
/**
 * Tiny console logger: coloured [ERROR]/[WARN]/[INFO] prefix plus a
 * local timestamp. Unknown types fall through to the info style.
 * @param {object} opts
 * @param {string} [opts.type='info'] - 'error' | 'warn' | anything else
 * @param {*} opts.msg - message to print
 */
const logger = ({ type = 'info', msg }) => {
  const timestamp = new Date().toLocaleString();
  let colour;
  let prefix;
  switch (type) {
    case 'error':
      colour = '\x1b[91m';
      prefix = '[ERROR] ';
      break;
    case 'warn':
      colour = '\x1b[33m';
      prefix = '[WARN] ';
      break;
    default:
      colour = '\x1b[32m';
      prefix = '[INFO] ';
  }
  // append the ANSI reset so the colour doesn't bleed into every
  // subsequent line the terminal prints
  console.log(colour + prefix + timestamp + ': ' + msg + '\x1b[0m');
};
/**
 * Abort the process when a Joi validation result carries an error,
 * logging the first validation message first. No-op when error is falsy.
 * @param {object|undefined} error - the `error` field of a Joi result
 */
const validationErrorCheck = (error) => {
  try {
    if (error) throw error?.details[0]?.message;
  } catch (error) {
    logger({ type: 'error', msg: error });
    // exit non-zero: this is a failure, and supervisors/scripts should
    // be able to tell it apart from a clean shutdown
    process.exit(1);
  }
};
// rubbish idea not being used
// Seconds elapsed between `lastDate` and now; null if dayjs throws.
// (async kept so the exported interface stays identical)
const backupTimeDiff = async (lastDate) => {
  try {
    return dayjs().diff(dayjs(lastDate), 'seconds');
  } catch (error) {
    logger({ type: 'error', msg: error });
    return null;
  }
};
// Reduce a list of backup records to just their file names.
// Falsy input resolves to an empty list so callers can iterate safely.
const backupsToDelete = async (backupArray) =>
  backupArray ? backupArray.map((backup) => backup.name) : [];
// Nap between backup runs. BACKUP_INTERVAL is in minutes; setTimeout
// wants milliseconds, hence the * 1000 * 60.
const haveSomeCoffee = async () => {
  const intervalMs = envVars.BACKUP_INTERVAL * 1000 * 60;
  await new Promise((resolve) => setTimeout(resolve, intervalMs));
};
export {
validationErrorCheck,
backupTimeDiff,
backupsToDelete,
haveSomeCoffee,
logger,
};

9
validation.js

@ -1,9 +1,14 @@
import Joi from 'joi';
// TODO: add a CUSTOM_TAG env var & validation
/**
 * Validate the full set of supported environment variables.
 * Required: BASE_URL (a URI) and TOKEN. The numeric settings are
 * optional and range-checked; BACKUP_INTERVAL and EXPORT_ATTEMPTS carry
 * defaults in the schema.
 * @param {object} data - the envVars object to validate
 * @returns {{value: object, error: (object|undefined)}} Joi result
 */
const environmentVariableValidation = (data) => {
  const schema = Joi.object({
    BASE_URL: Joi.string().uri().required(),
    TOKEN: Joi.string().required(),
    BACKUP_INTERVAL: Joi.number().default(1440).integer().min(1).max(10080),
    REMOTE_BACKUPS_MAX: Joi.number().integer().min(5).max(1000),
    EXPORT_ATTEMPTS: Joi.number().default(10).integer().min(1).max(100),
  });
  return schema.validate(data);
};

Loading…
Cancel
Save